[ 546.920082] env[69367]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=69367) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 546.920509] env[69367]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=69367) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 546.920509] env[69367]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=69367) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 546.920835] env[69367]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 547.029103] env[69367]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=69367) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}}
[ 547.039375] env[69367]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=69367) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}}
[ 547.086131] env[69367]: INFO oslo_service.periodic_task [-] Skipping periodic task _heal_instance_info_cache because its interval is negative
[ 547.650890] env[69367]: INFO nova.virt.driver [None req-41ed4773-557a-4345-9347-97da30c30e8b None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 547.726559] env[69367]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 547.726738] env[69367]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 547.726847] env[69367]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=69367) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 550.851914] env[69367]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-8c3e9e10-1133-45d9-83c7-db25d122f141 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 550.867520] env[69367]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=69367) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 550.867655] env[69367]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-353d7ef4-e369-477c-a199-3c42db9a5343 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 550.894130] env[69367]: INFO oslo_vmware.api [-] Successfully established new session; session ID is a8991.
[ 550.894285] env[69367]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.168s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 550.894852] env[69367]: INFO nova.virt.vmwareapi.driver [None req-41ed4773-557a-4345-9347-97da30c30e8b None None] VMware vCenter version: 7.0.3
[ 550.898227] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c620c0-54cf-4fc8-ad2d-1a1809c6962e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 550.915681] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-230822aa-5684-4a93-8f90-e08be4845ef0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 550.922172] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d9b773a-2dd8-4033-af77-1876d09bcff6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 550.929277] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f470e05b-5af5-4b41-a775-08edee30e852 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 550.943092] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ef98a7-2f79-479e-804a-6278de00fd60 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 550.949783] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd725bb5-4829-410c-9183-94465b1fbae5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 550.979394] env[69367]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-bd0a2d6a-a0f4-4f05-9264-1ddf5c86e7d4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 550.984987] env[69367]: DEBUG nova.virt.vmwareapi.driver [None req-41ed4773-557a-4345-9347-97da30c30e8b None None] Extension org.openstack.compute already exists. {{(pid=69367) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 550.988098] env[69367]: INFO nova.compute.provider_config [None req-41ed4773-557a-4345-9347-97da30c30e8b None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 551.490966] env[69367]: DEBUG nova.context [None req-41ed4773-557a-4345-9347-97da30c30e8b None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),99efbe51-8783-4274-b7c9-f81dc5286a30(cell1) {{(pid=69367) load_cells /opt/stack/nova/nova/context.py:472}}
[ 551.491263] env[69367]: INFO nova.utils [None req-41ed4773-557a-4345-9347-97da30c30e8b None None] The cell worker thread pool MainProcess.cell_worker is initialized
[ 551.493415] env[69367]: DEBUG oslo_concurrency.lockutils [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 551.493636] env[69367]: DEBUG oslo_concurrency.lockutils [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 551.494347] env[69367]: DEBUG oslo_concurrency.lockutils [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 551.494801] env[69367]: DEBUG oslo_concurrency.lockutils [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] Acquiring lock "99efbe51-8783-4274-b7c9-f81dc5286a30" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 551.494997] env[69367]: DEBUG oslo_concurrency.lockutils [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] Lock "99efbe51-8783-4274-b7c9-f81dc5286a30" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 551.496009] env[69367]: DEBUG oslo_concurrency.lockutils [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] Lock "99efbe51-8783-4274-b7c9-f81dc5286a30" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 551.516555] env[69367]: INFO dbcounter [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] Registered counter for database nova_cell0
[ 551.525329] env[69367]: INFO dbcounter [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] Registered counter for database nova_cell1
[ 551.528904] env[69367]: DEBUG oslo_db.sqlalchemy.engines [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=69367) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 551.529327] env[69367]: DEBUG oslo_db.sqlalchemy.engines [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=69367) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 551.534683] env[69367]: ERROR nova.db.main.api [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenpool.py", line 87, in _spawn_n_impl
[ 551.534683] env[69367]: func(*args, **kwargs)
[ 551.534683] env[69367]: File "/opt/stack/data/venv/lib/python3.10/site-packages/futurist/_green.py", line 69, in __call__
[ 551.534683] env[69367]: self.work.run()
[ 551.534683] env[69367]: File "/opt/stack/data/venv/lib/python3.10/site-packages/futurist/_utils.py", line 45, in run
[ 551.534683] env[69367]: result = self.fn(*self.args, **self.kwargs)
[ 551.534683] env[69367]: File "/opt/stack/nova/nova/utils.py", line 695, in context_wrapper
[ 551.534683] env[69367]: return func(*args, **kwargs)
[ 551.534683] env[69367]: File "/opt/stack/nova/nova/context.py", line 420, in gather_result
[ 551.534683] env[69367]: result = fn(*args, **kwargs)
[ 551.534683] env[69367]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 551.534683] env[69367]: return f(*args, **kwargs)
[ 551.534683] env[69367]: File "/opt/stack/nova/nova/objects/service.py", line 557, in _db_service_get_minimum_version
[ 551.534683] env[69367]: return db.service_get_minimum_version(context, binaries)
[ 551.534683] env[69367]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 551.534683] env[69367]: _check_db_access()
[ 551.534683] env[69367]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 551.534683] env[69367]: stacktrace = ''.join(traceback.format_stack())
[ 551.534683] env[69367]:
[ 551.535615] env[69367]: ERROR nova.db.main.api [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenpool.py", line 87, in _spawn_n_impl
[ 551.535615] env[69367]: func(*args, **kwargs)
[ 551.535615] env[69367]: File "/opt/stack/data/venv/lib/python3.10/site-packages/futurist/_green.py", line 69, in __call__
[ 551.535615] env[69367]: self.work.run()
[ 551.535615] env[69367]: File "/opt/stack/data/venv/lib/python3.10/site-packages/futurist/_utils.py", line 45, in run
[ 551.535615] env[69367]: result = self.fn(*self.args, **self.kwargs)
[ 551.535615] env[69367]: File "/opt/stack/nova/nova/utils.py", line 695, in context_wrapper
[ 551.535615] env[69367]: return func(*args, **kwargs)
[ 551.535615] env[69367]: File "/opt/stack/nova/nova/context.py", line 420, in gather_result
[ 551.535615] env[69367]: result = fn(*args, **kwargs)
[ 551.535615] env[69367]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 551.535615] env[69367]: return f(*args, **kwargs)
[ 551.535615] env[69367]: File "/opt/stack/nova/nova/objects/service.py", line 557, in _db_service_get_minimum_version
[ 551.535615] env[69367]: return db.service_get_minimum_version(context, binaries)
[ 551.535615] env[69367]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 551.535615] env[69367]: _check_db_access()
[ 551.535615] env[69367]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 551.535615] env[69367]: stacktrace = ''.join(traceback.format_stack())
[ 551.535615] env[69367]:
[ 551.536122]
env[69367]: WARNING nova.objects.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000 [ 551.536280] env[69367]: WARNING nova.objects.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] Failed to get minimum service version for cell 99efbe51-8783-4274-b7c9-f81dc5286a30 [ 551.536733] env[69367]: DEBUG oslo_concurrency.lockutils [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] Acquiring lock "singleton_lock" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 551.536899] env[69367]: DEBUG oslo_concurrency.lockutils [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] Acquired lock "singleton_lock" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 551.537163] env[69367]: DEBUG oslo_concurrency.lockutils [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] Releasing lock "singleton_lock" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 551.537504] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] Full set of CONF: {{(pid=69367) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/service.py:357}} [ 551.537651] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ******************************************************************************** {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 551.537782] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] Configuration options gathered from: {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 551.537917] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 551.538121] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 551.538250] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ================================================================================ {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 551.538455] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] allow_resize_to_same_host = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.538626] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] arq_binding_timeout = 300 {{(pid=69367) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.538757] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] backdoor_port = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.538886] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] backdoor_socket = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.539060] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] block_device_allocate_retries = 60 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.539227] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] block_device_allocate_retries_interval = 3 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.539386] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cell_worker_thread_pool_size = 5 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.539569] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cert = self.pem {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.539767] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.539942] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] compute_monitors = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.540127] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] config_dir = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.540300] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] config_drive_format = iso9660 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.540434] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.540599] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] config_source = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.540767] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] console_host = devstack {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.540933] 
env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] control_exchange = nova {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.541102] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cpu_allocation_ratio = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.541265] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] daemon = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.541429] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] debug = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.541737] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] default_access_ip_network_name = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.541926] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] default_availability_zone = nova {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.542102] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] default_ephemeral_format = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.542298] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] default_green_pool_size = 1000 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.542541] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.542706] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] default_schedule_zone = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.542894] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] disk_allocation_ratio = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.543083] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] enable_new_services = True 
{{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.543272] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] enabled_apis = ['osapi_compute'] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.543440] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] enabled_ssl_apis = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.543600] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] flat_injected = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.543761] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] force_config_drive = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.543921] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] force_raw_images = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.544105] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] graceful_shutdown_timeout = 5 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.544270] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] heal_instance_info_cache_interval = -1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.544559] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] host = cpu-1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.544765] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.544936] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] initial_disk_allocation_ratio = 1.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.545112] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] initial_ram_allocation_ratio = 1.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.545339] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.545505] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] instance_build_timeout = 0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 
551.545665] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] instance_delete_interval = 300 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.545836] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] instance_format = [instance: %(uuid)s] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.546008] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] instance_name_template = instance-%08x {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.546181] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] instance_usage_audit = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.546352] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] instance_usage_audit_period = month {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.546518] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.546683] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] instances_path = /opt/stack/data/nova/instances {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.546848] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] internal_service_availability_zone = internal {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.547030] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] key = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.547185] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] live_migration_retry_count = 30 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.547355] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] log_color = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.547524] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] log_config_append = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.547692] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.547855] env[69367]: DEBUG oslo_service.backend.eventlet.service [None 
req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] log_dir = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.548023] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] log_file = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.548157] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] log_options = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.548319] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] log_rotate_interval = 1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.548489] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] log_rotate_interval_type = days {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.548655] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] log_rotation_type = none {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.548785] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.548910] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.549089] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.549259] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.549387] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.549556] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] long_rpc_timeout = 1800 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.549746] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] 
max_concurrent_builds = 10 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.549913] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] max_concurrent_live_migrations = 1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.550085] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] max_concurrent_snapshots = 5 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.550247] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] max_local_block_devices = 3 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.550407] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] max_logfile_count = 30 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.550563] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] max_logfile_size_mb = 200 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.550723] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] maximum_instance_delete_attempts = 5 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.550894] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] metadata_listen = 0.0.0.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.551076] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] metadata_listen_port = 8775 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.551254] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] metadata_workers = 2 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.551419] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] migrate_max_retries = -1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.551587] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] mkisofs_cmd = genisoimage {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.551959] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] my_block_storage_ip = 10.180.1.21 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.552112] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] my_ip = 10.180.1.21 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.552329] env[69367]: DEBUG 
oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.552496] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] network_allocate_retries = 0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.552681] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.552874] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] osapi_compute_listen = 0.0.0.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.553059] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] osapi_compute_listen_port = 8774 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.553237] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] osapi_compute_unique_server_name_scope = {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.553412] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] osapi_compute_workers = 2 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.553574] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] password_length = 12 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.553735] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] periodic_enable = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.553899] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] periodic_fuzzy_delay = 60 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.554115] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] pointer_model = usbtablet {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.554300] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] preallocate_images = none {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.554467] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] publish_errors = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.554598] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] pybasedir = 
/opt/stack/nova {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.554757] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ram_allocation_ratio = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.554921] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] rate_limit_burst = 0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.555100] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] rate_limit_except_level = CRITICAL {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.555263] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] rate_limit_interval = 0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.555424] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] reboot_timeout = 0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.555583] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] reclaim_instance_interval = 0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.555747] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] record = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.555916] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] reimage_timeout_per_gb = 60 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.556100] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] report_interval = 120 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.556269] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] rescue_timeout = 0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.556430] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] reserved_host_cpus = 0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.556590] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] reserved_host_disk_mb = 0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.556752] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] reserved_host_memory_mb = 512 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.556919] env[69367]: DEBUG oslo_service.backend.eventlet.service [None 
req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] reserved_huge_pages = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.557090] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] resize_confirm_window = 0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.557279] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] resize_fs_using_block_device = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.557419] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] resume_guests_state_on_host_boot = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.557585] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.557745] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] rpc_response_timeout = 60 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.557906] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] run_external_periodic_tasks = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.558082] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] running_deleted_instance_action = reap {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.558243] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] running_deleted_instance_poll_interval = 1800 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.558402] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] running_deleted_instance_timeout = 0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.558560] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] scheduler_instance_sync_interval = 120 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.558730] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] service_down_time = 720 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.558899] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] servicegroup_driver = db {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.559083] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] shell_completion = None 
{{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.559250] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] shelved_offload_time = 0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.559409] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] shelved_poll_interval = 3600 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.559599] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] shutdown_timeout = 0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.559783] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] source_is_ipv6 = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 551.559950] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ssl_only = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 552.025095] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 552.025460] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] sync_power_state_interval = 600 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 552.025586] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] sync_power_state_pool_size = 1000 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 552.025724] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] syslog_log_facility = LOG_USER {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 552.025891] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] tempdir = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 552.026072] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] timeout_nbd = 10 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 552.026257] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] transport_url = **** {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 552.026427] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] update_resources_interval = 0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 552.026596] env[69367]: DEBUG oslo_service.backend.eventlet.service [None 
req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] use_cow_images = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 552.026759] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] use_journal = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 552.026926] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] use_json = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 552.027104] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] use_rootwrap_daemon = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 552.027306] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] use_stderr = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 552.027479] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] use_syslog = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 552.027641] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vcpu_pin_set = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 552.027816] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vif_plugging_is_fatal = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 552.027997] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vif_plugging_timeout = 300 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 552.028182] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] virt_mkfs = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 552.028349] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] volume_usage_poll_interval = 0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 552.028513] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] watch_log_file = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 552.028687] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] web = /usr/share/spice-html5 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 552.028884] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.029068] 
env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.029243] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.029418] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_concurrency.disable_process_locking = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.029789] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.029983] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.030172] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.030354] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.030516] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.030681] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.030871] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api.auth_strategy = keystone {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.031048] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api.compute_link_prefix = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.031233] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.031410] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] 
api.dhcp_domain = novalocal {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.031582] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api.enable_instance_password = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.031788] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api.glance_link_prefix = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.031961] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.032151] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.032319] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api.instance_list_per_project_cells = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.032484] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api.list_records_by_skipping_down_cells = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.032723] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api.local_metadata_per_cell = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.032881] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api.max_limit = 1000 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.033069] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api.metadata_cache_expiration = 15 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.033250] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api.neutron_default_tenant_id = default {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.033430] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api.response_validation = warn {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.033602] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api.use_neutron_default_nets = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.033769] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api.vendordata_dynamic_connect_timeout = 
5 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.033935] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.034117] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.034298] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.034469] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api.vendordata_dynamic_targets = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.034633] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api.vendordata_jsonfile_path = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.034817] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.035026] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.backend = dogpile.cache.memcached {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.035205] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.backend_argument = **** {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.035368] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.backend_expiration_time = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.035541] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.config_prefix = cache.oslo {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.035745] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.dead_timeout = 60.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.035921] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.debug_cache_backend = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.036104] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.enable_retry_client = False {{(pid=69367) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.036266] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.enable_socket_keepalive = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.036436] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.enabled = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.036604] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.enforce_fips_mode = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.036770] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.expiration_time = 600 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.036934] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.hashclient_retry_attempts = 2 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.037115] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.hashclient_retry_delay = 1.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.037293] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.memcache_dead_retry = 300 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.037454] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.memcache_password = **** {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.037621] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.037786] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.037954] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.memcache_pool_maxsize = 10 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.038133] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.038300] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.memcache_sasl_enabled = False {{(pid=69367) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.038484] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.038669] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.memcache_socket_timeout = 1.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.038852] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.memcache_username = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.039033] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.proxies = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.039208] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.redis_db = 0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.039372] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.redis_password = **** {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.039547] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.redis_sentinel_service_name = mymaster {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.039726] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.039898] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.redis_server = localhost:6379 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.040083] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.redis_socket_timeout = 1.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.040246] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.redis_username = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.040415] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.retry_attempts = 2 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.040579] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.retry_delay = 0.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.040744] env[69367]: DEBUG 
oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.socket_keepalive_count = 1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.040906] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.socket_keepalive_idle = 1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.041081] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.socket_keepalive_interval = 1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.041245] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.tls_allowed_ciphers = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.041406] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.tls_cafile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.041564] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.tls_certfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.041758] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.tls_enabled = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.041937] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cache.tls_keyfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.042126] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cinder.auth_section = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.042309] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cinder.auth_type = password {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.042476] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cinder.cafile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.042678] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cinder.catalog_info = volumev3::publicURL {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.042826] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cinder.certfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.042995] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cinder.collect_timing = False {{(pid=69367) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.043177] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cinder.cross_az_attach = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.043345] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cinder.debug = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.043511] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cinder.endpoint_template = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.043678] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cinder.http_retries = 3 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.043844] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cinder.insecure = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.044024] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cinder.keyfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.044200] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cinder.os_region_name = RegionOne {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.044367] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cinder.split_loggers = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.044529] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cinder.timeout = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.044734] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.044917] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] compute.cpu_dedicated_set = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.045095] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] compute.cpu_shared_set = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.045270] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] compute.image_type_exclude_list = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.045437] env[69367]: DEBUG 
oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.045605] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] compute.max_concurrent_disk_ops = 0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.045771] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] compute.max_disk_devices_to_attach = -1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.045937] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.046123] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.046292] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] compute.resource_provider_association_refresh = 300 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.046456] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.046618] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] compute.shutdown_retry_interval = 10 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.046808] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.046986] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] conductor.workers = 2 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.047182] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] console.allowed_origins = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.047346] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] console.ssl_ciphers = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.047518] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] console.ssl_minimum_version = default {{(pid=69367) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.047703] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] consoleauth.enforce_session_timeout = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.047895] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] consoleauth.token_ttl = 600 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.048081] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cyborg.cafile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.048245] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cyborg.certfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.048411] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cyborg.collect_timing = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.048573] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cyborg.connect_retries = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.048735] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cyborg.connect_retry_delay = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.048896] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cyborg.endpoint_override = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.049071] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cyborg.insecure = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.049236] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cyborg.keyfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.049400] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cyborg.max_version = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.049563] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cyborg.min_version = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.049724] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cyborg.region_name = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.049887] env[69367]: DEBUG oslo_service.backend.eventlet.service [None 
req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cyborg.retriable_status_codes = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.050056] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cyborg.service_name = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.050233] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cyborg.service_type = accelerator {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.050398] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cyborg.split_loggers = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.050560] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cyborg.status_code_retries = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.050734] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cyborg.status_code_retry_delay = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.050915] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cyborg.timeout = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.051113] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.051280] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] cyborg.version = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.051456] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] database.asyncio_connection = **** {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.051621] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] database.asyncio_slave_connection = **** {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.051873] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] database.backend = sqlalchemy {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.052103] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] database.connection = **** {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.052283] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] database.connection_debug = 0 {{(pid=69367) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.052457] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] database.connection_parameters = {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.052627] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] database.connection_recycle_time = 3600 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.052794] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] database.connection_trace = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.052960] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] database.db_inc_retry_interval = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.053143] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] database.db_max_retries = 20 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.053312] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] database.db_max_retry_interval = 10 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.053476] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] database.db_retry_interval = 1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.053641] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] database.max_overflow = 50 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.053830] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] database.max_pool_size = 5 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.054041] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] database.max_retries = 10 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.054236] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.054404] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] database.mysql_wsrep_sync_wait = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.054567] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] database.pool_timeout = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
552.054734] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] database.retry_interval = 10 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.054897] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] database.slave_connection = **** {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.055072] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] database.sqlite_synchronous = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.055242] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] database.use_db_reconnect = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.055414] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api_database.asyncio_connection = **** {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.055577] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api_database.asyncio_slave_connection = **** {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.055750] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api_database.backend = sqlalchemy {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.055922] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api_database.connection = **** {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.056104] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api_database.connection_debug = 0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.056283] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api_database.connection_parameters = {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.056451] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api_database.connection_recycle_time = 3600 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.056616] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api_database.connection_trace = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.056810] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api_database.db_inc_retry_interval = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.056988] env[69367]: DEBUG 
oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api_database.db_max_retries = 20 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.057173] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api_database.db_max_retry_interval = 10 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.057340] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api_database.db_retry_interval = 1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.057506] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api_database.max_overflow = 50 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.057671] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api_database.max_pool_size = 5 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.057842] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api_database.max_retries = 10 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.058016] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.058187] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.058346] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api_database.pool_timeout = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.058509] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api_database.retry_interval = 10 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.058667] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api_database.slave_connection = **** {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.058830] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] api_database.sqlite_synchronous = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.059015] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] devices.enabled_mdev_types = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.059248] env[69367]: DEBUG oslo_service.backend.eventlet.service [None 
req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.059450] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ephemeral_storage_encryption.default_format = luks {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.059631] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ephemeral_storage_encryption.enabled = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.059862] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.060068] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.api_servers = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.060243] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.cafile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.060413] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.certfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.060582] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.collect_timing = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.060746] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.connect_retries = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.060911] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.connect_retry_delay = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.061091] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.debug = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.061266] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.default_trusted_certificate_ids = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.061433] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.enable_certificate_validation = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.061600] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] 
glance.enable_rbd_download = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.061791] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.endpoint_override = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.061971] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.insecure = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.062174] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.keyfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.062354] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.max_version = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.062515] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.min_version = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.062685] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.num_retries = 3 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.062856] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.rbd_ceph_conf = {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.063029] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.rbd_connect_timeout = 5 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.063209] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.rbd_pool = {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.063391] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.rbd_user = {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.063556] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.region_name = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.063721] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.retriable_status_codes = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.063885] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.service_name = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.064093] env[69367]: DEBUG 
oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.service_type = image {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.064274] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.split_loggers = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.064439] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.status_code_retries = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.064600] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.status_code_retry_delay = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.064761] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.timeout = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.064950] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.065143] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.verify_glance_signatures = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.065370] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] glance.version = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.065557] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] guestfs.debug = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.065733] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] manila.auth_section = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.065906] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] manila.auth_type = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.066085] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] manila.cafile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.066252] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] manila.certfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.066419] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] manila.collect_timing = False {{(pid=69367) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.066581] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] manila.connect_retries = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.066742] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] manila.connect_retry_delay = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.066907] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] manila.endpoint_override = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.067090] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] manila.insecure = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.067296] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] manila.keyfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.067500] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] manila.max_version = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.067700] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] manila.min_version = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.067914] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] manila.region_name = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.068143] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] manila.retriable_status_codes = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.068370] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] manila.service_name = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.068596] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] manila.service_type = shared-file-system {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.068815] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] manila.share_apply_policy_timeout = 10 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.069055] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] manila.split_loggers = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.069278] env[69367]: DEBUG 
oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] manila.status_code_retries = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.069495] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] manila.status_code_retry_delay = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.069706] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] manila.timeout = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.069937] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.070160] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] manila.version = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.070400] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] mks.enabled = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.070792] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.070998] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] image_cache.manager_interval = 2400 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.071188] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] image_cache.precache_concurrency = 1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.071366] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] image_cache.remove_unused_base_images = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.071556] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.071811] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.071999] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] image_cache.subdirectory_name = _base {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.072201] env[69367]: DEBUG 
oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.api_max_retries = 60 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.072373] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.api_retry_interval = 2 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.072539] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.auth_section = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.072709] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.auth_type = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.072866] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.cafile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.073041] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.certfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.073213] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.collect_timing = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.073380] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.conductor_group = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.073557] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.connect_retries = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.073757] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.connect_retry_delay = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.073927] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.endpoint_override = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.074134] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.insecure = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.074306] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.keyfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.074471] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.max_version = None {{(pid=69367) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.074633] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.min_version = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.074802] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.peer_list = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.074967] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.region_name = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.075144] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.retriable_status_codes = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.075314] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.serial_console_state_timeout = 10 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.075476] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.service_name = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.075649] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.service_type = baremetal {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.075812] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.shard = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.075976] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.split_loggers = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.076152] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.status_code_retries = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.076315] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.status_code_retry_delay = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.076476] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.timeout = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.076661] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.076831] env[69367]: DEBUG 
oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ironic.version = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.077023] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.077206] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] key_manager.fixed_key = **** {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.077392] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.077558] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] barbican.barbican_api_version = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.077722] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] barbican.barbican_endpoint = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.077896] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] barbican.barbican_endpoint_type = public {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.078081] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] barbican.barbican_region_name = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.078247] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] barbican.cafile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.078411] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] barbican.certfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.078576] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] barbican.collect_timing = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.078738] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] barbican.insecure = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.078900] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] barbican.keyfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.079078] env[69367]: DEBUG oslo_service.backend.eventlet.service [None 
req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] barbican.number_of_retries = 60 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.079246] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] barbican.retry_delay = 1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.079408] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] barbican.send_service_user_token = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.079580] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] barbican.split_loggers = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.079745] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] barbican.timeout = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.079911] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] barbican.verify_ssl = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.080084] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] barbican.verify_ssl_path = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.080257] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] barbican_service_user.auth_section = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.080423] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] barbican_service_user.auth_type = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.080583] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] barbican_service_user.cafile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.080743] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] barbican_service_user.certfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.080910] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] barbican_service_user.collect_timing = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.081088] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] barbican_service_user.insecure = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.081255] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] 
barbican_service_user.keyfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.081421] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] barbican_service_user.split_loggers = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.081584] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] barbican_service_user.timeout = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.081785] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vault.approle_role_id = **** {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.081961] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vault.approle_secret_id = **** {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.082153] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vault.kv_mountpoint = secret {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.082318] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vault.kv_path = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.082485] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vault.kv_version = 2 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.082650] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vault.namespace = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.082828] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vault.root_token_id = **** {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.082975] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vault.ssl_ca_crt_file = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.083164] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vault.timeout = 60.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.083333] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vault.use_ssl = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.083506] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
552.083678] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] keystone.cafile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.083844] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] keystone.certfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.084047] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] keystone.collect_timing = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.084237] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] keystone.connect_retries = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.084404] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] keystone.connect_retry_delay = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.084568] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] keystone.endpoint_override = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.084735] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] keystone.insecure = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.084902] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] keystone.keyfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.085080] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] keystone.max_version = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.085251] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] keystone.min_version = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.085416] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] keystone.region_name = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.085578] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] keystone.retriable_status_codes = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.085739] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] keystone.service_name = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.085912] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] 
keystone.service_type = identity {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.086086] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] keystone.split_loggers = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.086252] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] keystone.status_code_retries = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.086412] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] keystone.status_code_retry_delay = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.086571] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] keystone.timeout = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.086758] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.086931] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] keystone.version = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.087140] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.ceph_mount_options = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.087492] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.087679] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.connection_uri = {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.087848] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.cpu_mode = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.088025] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.cpu_model_extra_flags = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.088203] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.cpu_models = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.088378] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.cpu_power_governor_high = performance {{(pid=69367) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.088547] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.cpu_power_governor_low = powersave {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.088716] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.cpu_power_management = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.088895] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.089076] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.device_detach_attempts = 8 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.089250] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.device_detach_timeout = 20 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.089416] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.disk_cachemodes = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.089578] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.disk_prefix = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.089742] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.enabled_perf_events = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.089908] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.file_backed_memory = 0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.090088] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.gid_maps = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.090252] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.hw_disk_discard = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.090411] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.hw_machine_type = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.090578] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.images_rbd_ceph_conf = {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.090746] 
env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.090912] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.091095] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.images_rbd_glance_store_name = {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.091270] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.images_rbd_pool = rbd {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.091441] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.images_type = default {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.091601] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.images_volume_group = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.091789] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.inject_key = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.091962] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.inject_partition = -2 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.092141] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.inject_password = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.092304] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.iscsi_iface = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.092471] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.iser_use_multipath = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.092643] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.live_migration_bandwidth = 0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.092841] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.093023] env[69367]: DEBUG oslo_service.backend.eventlet.service 
[None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.live_migration_downtime = 500 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.093197] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.093363] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.093525] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.live_migration_inbound_addr = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.093689] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.093983] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.live_migration_permit_post_copy = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.094194] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.live_migration_scheme = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.094380] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.live_migration_timeout_action = abort {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.094555] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.live_migration_tunnelled = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.094719] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.live_migration_uri = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.094953] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.live_migration_with_native_tls = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.095168] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.max_queues = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.095344] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.095587] env[69367]: DEBUG 
oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.095758] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.nfs_mount_options = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.096076] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.096261] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.096432] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.num_iser_scan_tries = 5 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.096598] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.num_memory_encrypted_guests = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.096768] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.096937] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.num_pcie_ports = 0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.097122] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.num_volume_scan_tries = 5 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.097296] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.pmem_namespaces = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.097459] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.quobyte_client_cfg = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.097759] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.097938] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.rbd_connect_timeout = 5 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.098118] env[69367]: DEBUG 
oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.098288] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.098450] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.rbd_secret_uuid = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.098609] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.rbd_user = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.098776] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.098953] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.remote_filesystem_transport = ssh {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.099130] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.rescue_image_id = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.099292] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.rescue_kernel_id = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.099453] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.rescue_ramdisk_id = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.099625] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.099785] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.rx_queue_size = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.099973] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.smbfs_mount_options = {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.100311] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.100494] env[69367]: DEBUG oslo_service.backend.eventlet.service [None 
req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.snapshot_compression = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.100664] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.snapshot_image_format = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.100926] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.101132] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.sparse_logical_volumes = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.101308] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.swtpm_enabled = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.101487] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.swtpm_group = tss {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.101680] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.swtpm_user = tss {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.101867] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.sysinfo_serial = unique {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.102044] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.tb_cache_size = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.102214] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.tx_queue_size = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.102382] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.uid_maps = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.102547] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.use_virtio_for_bridges = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.102721] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.virt_type = kvm {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.102897] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.volume_clear = zero 
{{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.103075] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.volume_clear_size = 0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.103246] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.volume_enforce_multipath = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.103418] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.volume_use_multipath = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.103579] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.vzstorage_cache_path = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.103749] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.103923] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.vzstorage_mount_group = qemu {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.104104] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.vzstorage_mount_opts = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.104279] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.104962] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.105161] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.vzstorage_mount_user = stack {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.105335] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.105514] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.auth_section = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.105690] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.auth_type = password 
{{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.105857] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.cafile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.106027] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.certfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.106199] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.collect_timing = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.106360] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.connect_retries = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.106525] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.connect_retry_delay = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.106696] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.default_floating_pool = public {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.106860] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.endpoint_override = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.107032] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.extension_sync_interval = 600 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.107201] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.http_retries = 3 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.107361] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.insecure = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.107519] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.keyfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.107677] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.max_version = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.107851] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.108030] 
env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.min_version = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.108189] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.ovs_bridge = br-int {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.108357] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.physnets = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.108567] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.region_name = RegionOne {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.108755] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.retriable_status_codes = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.108932] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.service_metadata_proxy = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.109114] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.service_name = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.109288] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.service_type = network {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.109451] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.split_loggers = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.109634] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.status_code_retries = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.109825] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.status_code_retry_delay = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.109994] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.timeout = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.110201] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.110364] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 
None None] neutron.version = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.110538] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] notifications.bdms_in_notifications = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.110716] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] notifications.default_level = INFO {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.110882] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] notifications.include_share_mapping = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.111068] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] notifications.notification_format = unversioned {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.111235] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] notifications.notify_on_state_change = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.111412] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.111587] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] pci.alias = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.111795] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] pci.device_spec = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.111969] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] pci.report_in_placement = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.112160] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.auth_section = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.112338] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.auth_type = password {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.112509] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.112685] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.cafile = None 
{{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.112864] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.certfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.113048] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.collect_timing = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.113216] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.connect_retries = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.113379] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.connect_retry_delay = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.113543] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.default_domain_id = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.113702] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.default_domain_name = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.113865] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.domain_id = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.114033] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.domain_name = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.114199] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.endpoint_override = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.114363] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.insecure = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.114523] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.keyfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.114682] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.max_version = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.114843] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.min_version = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.115027] 
env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.password = **** {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.115187] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.project_domain_id = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.115357] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.project_domain_name = Default {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.115528] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.project_id = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.115703] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.project_name = service {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.115877] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.region_name = RegionOne {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.116052] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.retriable_status_codes = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.116222] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.service_name = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.116396] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.service_type = placement {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.116563] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.split_loggers = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.116724] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.status_code_retries = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.116888] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.status_code_retry_delay = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.117128] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.system_scope = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.117319] env[69367]: DEBUG oslo_service.backend.eventlet.service [None 
req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.timeout = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.117485] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.trust_id = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.117647] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.user_domain_id = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.117822] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.user_domain_name = Default {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.117986] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.user_id = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.118179] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.username = nova {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.118368] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.118531] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] placement.version = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.118711] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] quota.cores = 20 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.118881] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] quota.count_usage_from_placement = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.119066] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.119242] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] quota.injected_file_content_bytes = 10240 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.119411] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] quota.injected_file_path_length = 255 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.119595] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] quota.injected_files = 5 {{(pid=69367) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.119776] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] quota.instances = 10 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.119948] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] quota.key_pairs = 100 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.120132] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] quota.metadata_items = 128 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.120303] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] quota.ram = 51200 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.120470] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] quota.recheck_quota = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.120639] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] quota.server_group_members = 10 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.120805] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] quota.server_groups = 10 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.121029] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] quota.unified_limits_resource_list = ['servers'] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.121213] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] quota.unified_limits_resource_strategy = require {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.121388] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.121556] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.121748] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] scheduler.image_metadata_prefilter = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.121922] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=69367) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.122099] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] scheduler.max_attempts = 3 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.122272] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] scheduler.max_placement_results = 1000 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.122436] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.122597] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] scheduler.query_placement_for_image_type_support = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.122780] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.122964] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] scheduler.workers = 2 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.123158] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.123333] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.123514] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.123686] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.123858] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.124034] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.124205] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.124393] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.124560] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.host_subset_size = 1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.124727] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.124891] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.125071] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.image_props_weight_multiplier = 0.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.125254] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.image_props_weight_setting = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.125421] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None 
None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.125590] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.isolated_hosts = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.125755] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.isolated_images = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.125922] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.126096] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.126260] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.126422] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.pci_in_placement = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.126582] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.126742] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.126904] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.127075] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.127242] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.127403] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
552.127562] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.track_instance_changes = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.127737] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.127910] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] metrics.required = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.128089] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] metrics.weight_multiplier = 1.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.128257] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.128423] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] metrics.weight_setting = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.128742] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.128919] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] serial_console.enabled = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.129111] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] serial_console.port_range = 10000:20000 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.129290] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.129461] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.129661] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] serial_console.serialproxy_port = 6083 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.129845] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] service_user.auth_section = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
552.130033] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] service_user.auth_type = password {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.130204] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] service_user.cafile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.130367] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] service_user.certfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.130533] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] service_user.collect_timing = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.130696] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] service_user.insecure = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.130859] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] service_user.keyfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.131042] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] service_user.send_service_user_token = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.131211] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] service_user.split_loggers = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.131370] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] service_user.timeout = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.131540] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] spice.agent_enabled = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.131728] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] spice.enabled = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.132060] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.132271] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.132444] env[69367]: DEBUG oslo_service.backend.eventlet.service [None 
req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] spice.html5proxy_port = 6082 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.132607] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] spice.image_compression = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.132796] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] spice.jpeg_compression = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.132964] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] spice.playback_compression = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.133146] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] spice.require_secure = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.133321] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] spice.server_listen = 127.0.0.1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.133494] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.133779] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] spice.spice_direct_proxy_base_url = http://127.0.0.1:13002/nova {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.133954] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] spice.streaming_mode = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.134135] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] spice.zlib_compression = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.134309] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] upgrade_levels.baseapi = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.134487] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] upgrade_levels.compute = auto {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.134653] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] upgrade_levels.conductor = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.134818] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] upgrade_levels.scheduler = 
None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.134987] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vendordata_dynamic_auth.auth_section = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.135169] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vendordata_dynamic_auth.auth_type = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.135335] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vendordata_dynamic_auth.cafile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.135497] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vendordata_dynamic_auth.certfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.135661] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.135827] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vendordata_dynamic_auth.insecure = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.135987] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vendordata_dynamic_auth.keyfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.136168] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.136330] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vendordata_dynamic_auth.timeout = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.136507] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vmware.api_retry_count = 10 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.136676] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vmware.ca_file = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.136843] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vmware.cache_prefix = devstack-image-cache {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.137024] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vmware.cluster_name = testcl1 {{(pid=69367) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.137200] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vmware.connection_pool_size = 10 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.137362] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vmware.console_delay_seconds = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.137534] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vmware.datastore_regex = ^datastore.* {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.137749] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.137930] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vmware.host_password = **** {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.138112] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vmware.host_port = 443 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.138284] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vmware.host_username = administrator@vsphere.local {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.138454] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vmware.insecure = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.138619] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vmware.integration_bridge = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.138784] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vmware.maximum_objects = 100 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.138945] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vmware.pbm_default_policy = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.139123] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vmware.pbm_enabled = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.139287] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vmware.pbm_wsdl_location = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
552.139455] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.139641] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vmware.serial_port_proxy_uri = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.139813] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vmware.serial_port_service_uri = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.139983] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vmware.task_poll_interval = 0.5 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.140171] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vmware.use_linked_clone = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.140341] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vmware.vnc_keymap = en-us {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.140509] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vmware.vnc_port = 5900 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.140674] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vmware.vnc_port_total = 10000 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.140861] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vnc.auth_schemes = ['none'] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.141046] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vnc.enabled = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.141344] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.141531] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.141735] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vnc.novncproxy_port = 6080 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.141935] env[69367]: DEBUG oslo_service.backend.eventlet.service [None 
req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vnc.server_listen = 127.0.0.1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.142134] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.142301] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vnc.vencrypt_ca_certs = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.142462] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vnc.vencrypt_client_cert = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.142623] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vnc.vencrypt_client_key = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.142832] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.143012] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] workarounds.disable_deep_image_inspection = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.143187] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.143351] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.143515] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.143678] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] workarounds.disable_rootwrap = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.143842] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] workarounds.enable_numa_live_migration = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.144036] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.144195] env[69367]: DEBUG 
oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.144356] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.144518] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] workarounds.libvirt_disable_apic = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.144680] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.144846] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.145012] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.145183] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.145345] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.145504] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.145666] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.145829] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.145990] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.146171] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] 
workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.146358] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.146526] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] wsgi.secure_proxy_ssl_header = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.146693] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] zvm.ca_file = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.146857] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] zvm.cloud_connector_url = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.147325] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.147511] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] zvm.reachable_timeout = 300 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.147690] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.147871] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.148065] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] profiler.connection_string = messaging:// {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.148239] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] profiler.enabled = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.148411] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] profiler.es_doc_type = notification {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.148573] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] profiler.es_scroll_size = 10000 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.148742] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None 
None] profiler.es_scroll_time = 2m {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.148905] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] profiler.filter_error_trace = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.149085] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] profiler.hmac_keys = **** {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.149262] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] profiler.sentinel_service_name = mymaster {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.149431] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] profiler.socket_timeout = 0.1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.149620] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] profiler.trace_requests = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.149799] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] profiler.trace_sqlalchemy = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.149982] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] profiler_jaeger.process_tags = {} {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.150161] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] profiler_jaeger.service_name_prefix = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.150327] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] profiler_otlp.service_name_prefix = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.150503] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.150667] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.150831] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.150990] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] 
oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.151165] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.151324] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.151484] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.151652] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.151837] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.152029] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.152186] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.152356] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.152523] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.152709] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.152890] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.kombu_reconnect_splay = 0.0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.153071] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=69367) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.153237] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.153400] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.153572] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.153733] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.153895] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.154067] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.154231] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.154390] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.154551] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.154711] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.154899] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.155089] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.155256] env[69367]: DEBUG 
oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.155417] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.155578] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.ssl = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.155764] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.155936] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.156116] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.156291] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.156459] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.ssl_version = {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.156623] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.156812] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.156983] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_notifications.retry = -1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.157178] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.157351] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_messaging_notifications.transport_url = **** {{(pid=69367) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.157526] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.auth_section = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.157692] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.auth_type = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.157856] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.cafile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.158023] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.certfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.158191] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.collect_timing = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.158351] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.connect_retries = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.158510] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.connect_retry_delay = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.158670] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.endpoint_id = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.158865] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.endpoint_interface = publicURL {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.159038] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.endpoint_override = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.159202] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.endpoint_region_name = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.159362] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.endpoint_service_name = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.159522] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.endpoint_service_type = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
552.159714] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.insecure = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.159879] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.keyfile = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.160049] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.max_version = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.160212] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.min_version = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.160372] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.region_name = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.160533] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.retriable_status_codes = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.160693] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.service_name = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.160853] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.service_type = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.161026] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.split_loggers = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.161192] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.status_code_retries = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.161351] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.status_code_retry_delay = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.161510] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.timeout = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.161685] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.valid_interfaces = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.161867] env[69367]: DEBUG oslo_service.backend.eventlet.service [None 
req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_limit.version = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.162051] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_reports.file_event_handler = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.162224] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.162390] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] oslo_reports.log_dir = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.162565] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.162758] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.162934] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.163114] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vif_plug_linux_bridge_privileged.log_daemon_traceback = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.163286] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.163452] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.163614] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.163789] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.163954] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vif_plug_ovs_privileged.group = None {{(pid=69367) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.164211] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.164288] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vif_plug_ovs_privileged.log_daemon_traceback = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.164447] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.164608] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.164765] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] vif_plug_ovs_privileged.user = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.164940] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] os_vif_linux_bridge.flat_interface = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.165137] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.165316] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.165488] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.165660] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.165833] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.166012] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.166197] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] 
os_vif_linux_bridge.vlan_interface = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.166376] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.166547] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] os_vif_ovs.isolate_vif = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.166719] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.166888] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.167068] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.167243] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] os_vif_ovs.ovsdb_interface = native {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.167404] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] os_vif_ovs.per_port_bridge = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.167574] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] privsep_osbrick.capabilities = [21] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.167737] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] privsep_osbrick.group = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.167898] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] privsep_osbrick.helper_command = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.168074] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] privsep_osbrick.log_daemon_traceback = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.168247] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.168412] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] 
privsep_osbrick.thread_pool_size = 8 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.168573] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] privsep_osbrick.user = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.168751] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.168917] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] nova_sys_admin.group = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.169090] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] nova_sys_admin.helper_command = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.169274] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] nova_sys_admin.log_daemon_traceback = False {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.169448] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.169637] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.169810] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] nova_sys_admin.user = None {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 552.169949] env[69367]: DEBUG oslo_service.backend.eventlet.service [None req-179de5a3-7c84-4c20-9a57-63fbe2ad2019 None None] ******************************************************************************** {{(pid=69367) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 552.170402] env[69367]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 552.674491] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] Getting list of instances from cluster (obj){ [ 552.674491] env[69367]: value = "domain-c8" [ 552.674491] env[69367]: _type = "ClusterComputeResource" [ 552.674491] env[69367]: } {{(pid=69367) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 552.675398] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c77dd953-d955-43c6-a1fe-16357047bb96 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.685098] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] Got total of 0 instances {{(pid=69367) list_instances 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 552.685699] env[69367]: WARNING nova.virt.vmwareapi.driver [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 552.686231] env[69367]: INFO nova.virt.node [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] Generated node identity 19ddf8be-7305-4f70-8366-52a9957232e6 [ 552.686469] env[69367]: INFO nova.virt.node [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] Wrote node identity 19ddf8be-7305-4f70-8366-52a9957232e6 to /opt/stack/data/n-cpu-1/compute_id [ 553.189785] env[69367]: WARNING nova.compute.manager [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] Compute nodes ['19ddf8be-7305-4f70-8366-52a9957232e6'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 554.196449] env[69367]: INFO nova.compute.manager [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 555.202416] env[69367]: WARNING nova.compute.manager [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 555.202865] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 555.202970] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 555.203139] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 555.203301] env[69367]: DEBUG nova.compute.resource_tracker [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69367) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 555.204254] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c496c77d-8fae-41c5-a023-f4886d595a8d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.212890] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-303c646b-2259-4bfd-9943-65089be578ac {{(pid=69367) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.226527] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84818f0-f853-4afa-b989-4ea87a10a72b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.233448] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e9c36bd-1c39-4f75-aa01-d65d9349897e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.262580] env[69367]: DEBUG nova.compute.resource_tracker [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180761MB free_disk=1GB free_vcpus=48 pci_devices=None {{(pid=69367) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 555.262789] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 555.262958] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 555.765562] env[69367]: WARNING nova.compute.resource_tracker [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] No compute node record for cpu-1:19ddf8be-7305-4f70-8366-52a9957232e6: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 19ddf8be-7305-4f70-8366-52a9957232e6 could not be found. [ 556.269587] env[69367]: INFO nova.compute.resource_tracker [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 19ddf8be-7305-4f70-8366-52a9957232e6 [ 557.778292] env[69367]: DEBUG nova.compute.resource_tracker [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 557.778690] env[69367]: DEBUG nova.compute.resource_tracker [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 557.940640] env[69367]: INFO nova.scheduler.client.report [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] [req-09124a62-3b1b-46ee-a91d-0a7c0347a0b9] Created resource provider record via placement API for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
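The entries above and below record the resource tracker's final resource view for this node and the inventory it then writes to the newly created placement provider. As an illustrative reading of that logged payload only (not Nova's own code), the sketch below recomputes the schedulable capacity placement derives from each resource class as (total - reserved) * allocation_ratio; the dict literal simply copies the values that appear in the inventory entries that follow, and the variable names are invented for the example.

# Hedged sketch: rederive schedulable capacity from the logged inventory payload.
# Values copied from the "Updating inventory in ProviderTree" entries below.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g} schedulable")
# Prints: VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400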
[ 557.955912] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa9b3394-9dfb-453b-8bf7-a9fee3302ee6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.965013] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c92cbd49-0ffd-4bfc-b4f3-7d2fcca24885 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.996010] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-468ecfea-16c6-43ed-b5b8-4e9f560fdaa9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.004015] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f20d9a-9233-468b-9750-d314d9b948d9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.019157] env[69367]: DEBUG nova.compute.provider_tree [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 558.556718] env[69367]: DEBUG nova.scheduler.client.report [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] Updated inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 558.556954] env[69367]: DEBUG nova.compute.provider_tree [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] Updating resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 generation from 0 to 1 during operation: update_inventory {{(pid=69367) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 558.557181] env[69367]: DEBUG nova.compute.provider_tree [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 558.605902] env[69367]: DEBUG nova.compute.provider_tree [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] Updating resource 
provider 19ddf8be-7305-4f70-8366-52a9957232e6 generation from 1 to 2 during operation: update_traits {{(pid=69367) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 559.111264] env[69367]: DEBUG nova.compute.resource_tracker [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69367) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 559.111645] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.848s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 559.111645] env[69367]: DEBUG nova.service [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] Creating RPC server for service compute {{(pid=69367) start /opt/stack/nova/nova/service.py:177}} [ 559.126554] env[69367]: DEBUG nova.service [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] Join ServiceGroup membership for this service compute {{(pid=69367) start /opt/stack/nova/nova/service.py:194}} [ 559.126791] env[69367]: DEBUG nova.servicegroup.drivers.db [None req-0da3b70e-3d1a-481e-93a9-aea6b7a98ee1 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=69367) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 593.129698] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._sync_power_states {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 593.634876] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Getting list of instances from cluster (obj){ [ 593.634876] env[69367]: value = "domain-c8" [ 593.634876] env[69367]: _type = "ClusterComputeResource" [ 593.634876] env[69367]: } {{(pid=69367) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 593.636076] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50430016-0ee2-423f-86e7-ef9f825738c0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.650628] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Got total of 0 instances {{(pid=69367) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 593.650918] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 593.651424] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Getting list of instances from cluster (obj){ [ 593.651424] env[69367]: value = "domain-c8" [ 593.651424] env[69367]: _type = "ClusterComputeResource" [ 593.651424] env[69367]: } {{(pid=69367) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 593.652759] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70dc0c5d-03fb-48cf-afdb-567e0bf9fedc {{(pid=69367) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.667518] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Got total of 0 instances {{(pid=69367) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 602.143592] env[69367]: INFO nova.utils [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] The default thread pool MainProcess.default is initialized [ 602.144116] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Acquiring lock "937c05e9-06f1-4a5f-9f8c-ac40c262ce4e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.144387] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Lock "937c05e9-06f1-4a5f-9f8c-ac40c262ce4e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.370377] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Acquiring lock "1302cad6-55b7-4905-92c1-dfdd37042e30" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.370377] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Lock "1302cad6-55b7-4905-92c1-dfdd37042e30" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.650763] env[69367]: DEBUG nova.compute.manager [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 602.750563] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Acquiring lock "5c7b2127-e875-4222-8148-a2ea60631c25" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.750719] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Lock "5c7b2127-e875-4222-8148-a2ea60631c25" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.874123] env[69367]: DEBUG nova.compute.manager [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 603.212817] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 603.213141] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 603.215923] env[69367]: INFO nova.compute.claims [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 603.253477] env[69367]: DEBUG nova.compute.manager [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 603.406109] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 603.784199] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 603.841851] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Acquiring lock "a358ce6d-9826-4ddb-8c2f-51bac8db59d4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 603.841904] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Lock "a358ce6d-9826-4ddb-8c2f-51bac8db59d4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 604.346502] env[69367]: DEBUG nova.compute.manager [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 604.375853] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9db552c-51f1-479e-a3d7-37937e4ffccd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.388985] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06aa9e0-f40a-4345-8274-9df90c2cb4e9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.425642] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3125ef-422e-44c8-b0b5-fb9a167caaa6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.438454] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0206e5e-e1b9-4c0c-a43a-9db2517b57e2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.460671] env[69367]: DEBUG nova.compute.provider_tree [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 604.610776] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Acquiring lock "4e346ed1-36e9-421d-975f-e8bb6f05c0a0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 604.611045] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Lock "4e346ed1-36e9-421d-975f-e8bb6f05c0a0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 604.883770] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 604.964061] env[69367]: DEBUG nova.scheduler.client.report [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 605.115929] env[69367]: DEBUG nova.compute.manager [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 605.481651] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.267s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 605.484684] env[69367]: DEBUG nova.compute.manager [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Start building networks asynchronously for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 605.489124] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.083s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 605.489478] env[69367]: INFO nova.compute.claims [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 605.636363] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.895884] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquiring lock "e1c7d100-4ad7-4871-970f-bb7562bfc6fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.896232] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Lock "e1c7d100-4ad7-4871-970f-bb7562bfc6fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 605.998091] env[69367]: DEBUG nova.compute.utils [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 
tempest-ServerDiagnosticsV248Test-1636451139-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 605.999575] env[69367]: DEBUG nova.compute.manager [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Not allocating networking since 'none' was specified. {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 606.402668] env[69367]: DEBUG nova.compute.manager [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 606.501662] env[69367]: DEBUG nova.compute.manager [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Start building block device mappings for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 606.686940] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c0d2247-0b8c-4fa4-a3ee-97946eb9cef9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.696350] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa21976-d6ed-4006-978d-3fc5631b2102 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.740602] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-992edb25-b2e9-4b6a-ab04-5286d4fd3cf7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.750110] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2af195d-29f4-4a0d-b34f-ea9d424cc396 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.767944] env[69367]: DEBUG nova.compute.provider_tree [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 606.803907] env[69367]: DEBUG oslo_concurrency.lockutils [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Acquiring lock "3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.804668] env[69367]: DEBUG oslo_concurrency.lockutils [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Lock "3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 606.940267] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 607.104271] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 607.104914] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 607.105211] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 607.105338] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 607.105543] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 607.105807] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 607.106154] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 607.106315] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69367) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11183}} [ 607.106495] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 607.277153] env[69367]: DEBUG nova.scheduler.client.report [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 607.308718] env[69367]: DEBUG nova.compute.manager [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 607.514833] env[69367]: DEBUG nova.compute.manager [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Start spawning the instance on the hypervisor. {{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 607.609864] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 607.788600] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.301s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 607.789159] env[69367]: DEBUG nova.compute.manager [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Start building networks asynchronously for instance. 
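
The "Acquiring lock … / acquired … waited / released … held" triples around the resource tracker are the standard oslo.concurrency pattern: a named internal lock serializes instance claims on the node, and the wrapper logs how long each caller waited for and then held it. A minimal sketch of the same pattern (the function body and sleep are illustrative, not the resource tracker's actual code):

    import time

    from oslo_concurrency import lockutils


    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid):
        # Concurrent callers block here; the time spent blocking is the
        # "waited N.NNNs" figure, the time inside is "held N.NNNs".
        time.sleep(0.1)  # stand-in for the real claim bookkeeping
        return instance_uuid


    # Equivalent ad-hoc form for a one-off critical section.
    with lockutils.lock('compute_resources'):
        pass

    instance_claim('4e346ed1-36e9-421d-975f-e8bb6f05c0a0')
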
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 607.793355] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.010s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 607.798020] env[69367]: INFO nova.compute.claims [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 607.853588] env[69367]: DEBUG oslo_concurrency.lockutils [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.049554] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Acquiring lock "92bdb1b1-d8ab-46b2-9037-ee8fea4642ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.049787] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Lock "92bdb1b1-d8ab-46b2-9037-ee8fea4642ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.146170] env[69367]: DEBUG nova.virt.hardware [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 608.146486] env[69367]: DEBUG nova.virt.hardware [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 608.147185] env[69367]: DEBUG nova.virt.hardware [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 608.147185] env[69367]: DEBUG nova.virt.hardware [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 608.147410] env[69367]: DEBUG nova.virt.hardware [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 608.147410] env[69367]: DEBUG nova.virt.hardware [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 608.147675] env[69367]: DEBUG nova.virt.hardware [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 608.148464] env[69367]: DEBUG nova.virt.hardware [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 608.150296] env[69367]: DEBUG nova.virt.hardware [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 608.150296] env[69367]: DEBUG nova.virt.hardware [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 608.150296] env[69367]: DEBUG nova.virt.hardware [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 608.153047] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d98927bf-5ca0-4de0-95c1-373091343edb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.165380] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-93d260ed-d1cd-4fce-98c7-363357d37424 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.192062] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e89ae95f-6482-46b9-bdde-0a6023cbcad2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.213080] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Instance VIF info [] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 608.225024] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 608.225024] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5374a0ca-51fb-43b1-ace2-50d7a73d5f28 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.240078] env[69367]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 608.240271] env[69367]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=69367) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 608.240661] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Folder already exists: OpenStack. Parent ref: group-v4. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 608.241150] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Creating folder: Project (e4a7abd8be884430baf03a9621a75c45). Parent ref: group-v837645. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 608.243322] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-03d139d2-8b20-4d78-a825-ed5a3703c3bc {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.256434] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Created folder: Project (e4a7abd8be884430baf03a9621a75c45) in parent group-v837645. [ 608.256434] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Creating folder: Instances. Parent ref: group-v837649. 
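
The DuplicateName fault above is expected: the driver simply attempts to create each folder level (OpenStack, the project folder, Instances) and treats "already exists" as success, so concurrent builds converge on the same folder tree. A rough sketch of that idempotent create-or-reuse pattern, using a hypothetical session object and fault class rather than the real oslo.vmware/nova helpers:

    class DuplicateNameFault(Exception):
        """Stand-in for the vCenter DuplicateName fault."""


    def create_folder(session, parent_ref, name):
        # Try to create the child folder; if another request created it
        # first, fall back to looking it up, mirroring the
        # "Folder already exists" branch in the log above.
        try:
            return session.create_folder(parent_ref, name)
        except DuplicateNameFault:
            return session.find_child_folder(parent_ref, name)


    def ensure_instance_folder(session, root_ref, project_id):
        # OpenStack -> Project (<id>) -> Instances, created level by level.
        openstack = create_folder(session, root_ref, 'OpenStack')
        project = create_folder(session, openstack, 'Project (%s)' % project_id)
        return create_folder(session, project, 'Instances')
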
{{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 608.256434] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-183e7459-2d1f-46ea-956d-be11b92129eb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.268251] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Created folder: Instances in parent group-v837649. [ 608.268251] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 608.268420] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 608.268821] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ebb2c945-a733-4ff3-88b3-8c20a6509401 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.291289] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 608.291289] env[69367]: value = "task-4233682" [ 608.291289] env[69367]: _type = "Task" [ 608.291289] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.302995] env[69367]: DEBUG nova.compute.utils [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 608.310789] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233682, 'name': CreateVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.315413] env[69367]: DEBUG nova.compute.manager [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Allocating IP information in the background. 
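
"Allocating IP information in the background" means the Neutron port allocation is kicked off asynchronously while the build carries on with block devices and the VM itself; the result is only awaited once it is actually needed. A small sketch of that overlap using a plain thread pool (Nova drives this through its own eventlet-based machinery, so the primitives and helper names here are purely illustrative):

    from concurrent.futures import ThreadPoolExecutor


    def allocate_for_instance(instance_uuid):
        # Stand-in for the Neutron calls that create and bind the ports.
        return ['port-for-%s' % instance_uuid]


    def build_block_devices(instance_uuid):
        # Stand-in for the block-device-mapping work that proceeds while
        # the network allocation is still in flight.
        return []


    with ThreadPoolExecutor(max_workers=1) as executor:
        network_future = executor.submit(
            allocate_for_instance, '1302cad6-55b7-4905-92c1-dfdd37042e30')
        bdms = build_block_devices('1302cad6-55b7-4905-92c1-dfdd37042e30')
        # Only block on the ports once spawning actually needs them.
        network_info = network_future.result()
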
{{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 608.315413] env[69367]: DEBUG nova.network.neutron [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 608.507801] env[69367]: DEBUG nova.policy [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1d7dbd0855df4e5fb3ad7c25ce26868e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f78c11c8a4c848e29c847f5e0d5b55f5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 608.552564] env[69367]: DEBUG nova.compute.manager [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 608.815804] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233682, 'name': CreateVM_Task} progress is 99%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.816594] env[69367]: DEBUG nova.compute.manager [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Start building block device mappings for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 609.029358] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d883536e-5547-4bde-a429-b4b09ff900c7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.041358] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f194232-a55d-4ef8-a041-00a55cc85699 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.089510] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd9a689a-d93f-469f-9f58-b2978345556e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.099069] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d5c600-4727-4442-bcf5-fb10d68a179d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.122716] env[69367]: DEBUG nova.compute.provider_tree [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 609.123213] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 609.308190] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233682, 'name': CreateVM_Task, 'duration_secs': 0.618583} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.308190] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 609.309606] env[69367]: DEBUG oslo_vmware.service [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42869492-5ac6-4d2a-a91a-555878d825ac {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.316424] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.316678] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 609.317485] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 609.321330] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2425e0f4-6a8a-491e-91df-1fca1c5d7fd0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.332271] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Waiting for the task: (returnval){ [ 609.332271] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52df847f-d99c-11d3-0737-ba125942cd63" [ 609.332271] env[69367]: _type = "Task" [ 609.332271] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.344574] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52df847f-d99c-11d3-0737-ba125942cd63, 'name': SearchDatastore_Task} progress is 0%. 
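
Every long-running vCenter operation (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, …) comes back as a task reference that is then polled until it reaches a terminal state, which is what the recurring "progress is N%" lines are. A simplified version of that polling loop, with get_task_info() standing in for the real property read that oslo.vmware performs:

    import time


    class TaskFailed(Exception):
        pass


    def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
        # get_task_info(task_ref) is a stand-in for reading the task's
        # 'info' property from vCenter; it should return an object with
        # .state, .progress and .error attributes.
        while True:
            info = get_task_info(task_ref)
            if info.state == 'success':
                return info
            if info.state == 'error':
                raise TaskFailed(info.error)
            # 'queued' or 'running': report progress and try again, like
            # the "progress is N%" debug lines above.
            print('Task %s progress is %s%%' % (task_ref, info.progress or 0))
            time.sleep(poll_interval)
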
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.624129] env[69367]: DEBUG nova.scheduler.client.report [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 609.828629] env[69367]: DEBUG nova.compute.manager [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Start spawning the instance on the hypervisor. {{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 609.849847] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 609.850251] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 609.851214] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.851214] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 609.851524] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 609.851951] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9eae9a67-fa23-420a-aab4-0684c82f7827 {{(pid=69367) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.864446] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 609.864446] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 609.864446] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437969bf-7942-4aac-b5e3-45452b838a53 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.870748] env[69367]: DEBUG nova.virt.hardware [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 609.871167] env[69367]: DEBUG nova.virt.hardware [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 609.871419] env[69367]: DEBUG nova.virt.hardware [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 609.872951] env[69367]: DEBUG nova.virt.hardware [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 609.872951] env[69367]: DEBUG nova.virt.hardware [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 609.872951] env[69367]: DEBUG nova.virt.hardware [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 609.873683] env[69367]: DEBUG nova.virt.hardware [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 609.874408] env[69367]: DEBUG nova.virt.hardware [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 609.874408] env[69367]: DEBUG nova.virt.hardware [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 609.874772] env[69367]: DEBUG nova.virt.hardware [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 609.875797] env[69367]: DEBUG nova.virt.hardware [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 609.876785] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c152f66-b4d6-4cd9-b946-c4ab75d6bf58 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.894299] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3c70509-8a7e-4a97-b211-1d8a12caa786 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.900781] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b1a63a6-0a7e-4711-8524-dc5c5bfe2f2f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.909720] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Waiting for the task: (returnval){ [ 609.909720] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52b005b3-24a4-c165-e0bf-880b0769509c" [ 609.909720] env[69367]: _type = "Task" [ 609.909720] env[69367]: } to complete. 
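
The topology walk above (limits and preferences all 0, i.e. unconstrained, collapsing to a single 1x1x1 candidate for a 1-vCPU flavor) can be reproduced with a toy enumerator: list every sockets*cores*threads factorisation of the vCPU count that fits the per-dimension limits. This is an illustrative simplification, not Nova's actual hardware.py logic:

    from itertools import product


    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Every (sockets, cores, threads) split whose product equals the
        # vCPU count and respects the per-dimension limits.
        return [
            (s, c, t)
            for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                                   range(1, min(vcpus, max_cores) + 1),
                                   range(1, min(vcpus, max_threads) + 1))
            if s * c * t == vcpus
        ]


    # A 1-vCPU flavor with no constraints yields exactly one candidate,
    # matching "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
    print(possible_topologies(1))   # [(1, 1, 1)]
    print(possible_topologies(4))   # (1, 1, 4), (1, 2, 2), (1, 4, 1), ...
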
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.929231] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Preparing fetch location {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 609.929512] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Creating directory with path [datastore2] vmware_temp/e37abdf1-cc2d-4a95-a6a4-0450fc42ca0c/2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 609.929773] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eac64416-9276-4f14-bd47-b55eb3ec9e0f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.944275] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Created directory with path [datastore2] vmware_temp/e37abdf1-cc2d-4a95-a6a4-0450fc42ca0c/2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 609.945575] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Fetch image to [datastore2] vmware_temp/e37abdf1-cc2d-4a95-a6a4-0450fc42ca0c/2b099420-9152-4d93-9609-4c9317824c11/tmp-sparse.vmdk {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 609.945575] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Downloading image file data 2b099420-9152-4d93-9609-4c9317824c11 to [datastore2] vmware_temp/e37abdf1-cc2d-4a95-a6a4-0450fc42ca0c/2b099420-9152-4d93-9609-4c9317824c11/tmp-sparse.vmdk on the data store datastore2 {{(pid=69367) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 609.945575] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ea6cb9-0f6e-4d5e-9811-58db79148c60 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.958378] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c368309-5015-4494-966c-187cbdf21a05 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.970297] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6b5b50-5361-4de7-983c-f32150969996 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.013092] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-77fd4a69-f75e-4372-840b-1f9edf0ca237 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.020952] env[69367]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-563cd831-54f6-4c1c-894b-895a8c3a2243 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.056173] env[69367]: DEBUG nova.virt.vmwareapi.images [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Downloading image file data 2b099420-9152-4d93-9609-4c9317824c11 to the data store datastore2 {{(pid=69367) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 610.128198] env[69367]: DEBUG oslo_vmware.rw_handles [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e37abdf1-cc2d-4a95-a6a4-0450fc42ca0c/2b099420-9152-4d93-9609-4c9317824c11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69367) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 610.129838] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.337s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 610.130852] env[69367]: DEBUG nova.compute.manager [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Start building networks asynchronously for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 610.133954] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.251s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.136225] env[69367]: INFO nova.compute.claims [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 610.698664] env[69367]: DEBUG nova.compute.utils [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 610.700940] env[69367]: DEBUG nova.compute.manager [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Allocating IP information in the background. {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 610.702329] env[69367]: DEBUG nova.network.neutron [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 610.834951] env[69367]: DEBUG oslo_vmware.rw_handles [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Completed reading data from the image iterator. {{(pid=69367) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 610.835305] env[69367]: DEBUG oslo_vmware.rw_handles [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e37abdf1-cc2d-4a95-a6a4-0450fc42ca0c/2b099420-9152-4d93-9609-4c9317824c11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69367) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 610.842036] env[69367]: DEBUG nova.policy [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '451c459e61f14e17bcdd2a3f5b1240fc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '25daec9a55d94273bf3088110afe7705', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 610.979436] env[69367]: DEBUG nova.virt.vmwareapi.images [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Downloaded image file data 2b099420-9152-4d93-9609-4c9317824c11 to vmware_temp/e37abdf1-cc2d-4a95-a6a4-0450fc42ca0c/2b099420-9152-4d93-9609-4c9317824c11/tmp-sparse.vmdk on the data store datastore2 {{(pid=69367) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 610.981291] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Caching image {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 610.981718] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Copying Virtual Disk [datastore2] vmware_temp/e37abdf1-cc2d-4a95-a6a4-0450fc42ca0c/2b099420-9152-4d93-9609-4c9317824c11/tmp-sparse.vmdk to [datastore2] vmware_temp/e37abdf1-cc2d-4a95-a6a4-0450fc42ca0c/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 610.983482] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8b9bcc7e-3306-4823-bb37-ed9645433470 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.993126] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Waiting for the task: (returnval){ [ 610.993126] env[69367]: value = "task-4233684" [ 610.993126] env[69367]: _type = "Task" [ 610.993126] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.008518] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': task-4233684, 'name': CopyVirtualDisk_Task} progress is 0%. 
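
The cache-miss path above follows a fixed sequence: stream the Glance image over HTTPS into a temporary tmp-sparse.vmdk under vmware_temp/, copy it into the per-image path in devstack-image-cache_base, then delete the temporary file; later builds that find the cached .vmdk skip the download entirely. A compressed sketch of that flow, with hypothetical injected callables standing in for the real HTTP transfer and vCenter disk tasks:

    def fetch_image_if_missing(ds, image_id, exists, download_to, copy_disk,
                               delete_file):
        # 'ds' is the datastore name; exists/download_to/copy_disk/delete_file
        # are stand-ins for the datastore search, HTTP write handle,
        # CopyVirtualDisk_Task and DeleteDatastoreFile_Task steps in the log.
        cached = ('[%s] devstack-image-cache_base/%s/%s.vmdk'
                  % (ds, image_id, image_id))
        if exists(cached):
            return cached
        tmp = '[%s] vmware_temp/%s/tmp-sparse.vmdk' % (ds, image_id)
        download_to(tmp)          # stream image data via the write handle
        copy_disk(tmp, cached)    # CopyVirtualDisk_Task into the cache path
        delete_file(tmp)          # clean up the temporary sparse disk
        return cached
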
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.200582] env[69367]: DEBUG nova.network.neutron [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Successfully created port: c1cd0433-e331-4e76-af42-c5cd0421b041 {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 611.203453] env[69367]: DEBUG nova.compute.manager [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Start building block device mappings for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 611.344378] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3728d03d-a7c8-4cd3-ade5-eab11c7ceed9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.359934] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95cfce09-77fa-4a7a-8fc1-e12b47fa4949 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.402446] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6d2e45-6303-4d4c-be5a-40558929b498 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.413918] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffbf3390-8674-41a6-bb3c-852aba1fe05e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.437023] env[69367]: DEBUG nova.compute.provider_tree [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 611.516339] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': task-4233684, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.937691] env[69367]: DEBUG nova.scheduler.client.report [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 612.009087] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': task-4233684, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.715356} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.009087] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Copied Virtual Disk [datastore2] vmware_temp/e37abdf1-cc2d-4a95-a6a4-0450fc42ca0c/2b099420-9152-4d93-9609-4c9317824c11/tmp-sparse.vmdk to [datastore2] vmware_temp/e37abdf1-cc2d-4a95-a6a4-0450fc42ca0c/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 612.009087] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Deleting the datastore file [datastore2] vmware_temp/e37abdf1-cc2d-4a95-a6a4-0450fc42ca0c/2b099420-9152-4d93-9609-4c9317824c11/tmp-sparse.vmdk {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 612.009087] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8c4acf37-7efc-4658-989b-3e6837943c7a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.016124] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Waiting for the task: (returnval){ [ 612.016124] env[69367]: value = "task-4233686" [ 612.016124] env[69367]: _type = "Task" [ 612.016124] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.029546] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': task-4233686, 'name': DeleteDatastoreFile_Task} progress is 0%. 
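
The repeated "Inventory has not changed" entries show the report client comparing the freshly computed inventory against what it last reported for the provider and skipping the call to Placement when nothing differs. A minimal sketch of that compare-and-skip idea, with the update call left as a hypothetical callable:

    def sync_inventory(cached, new_inventory, push_to_placement):
        # 'cached' maps provider UUID -> inventory dict as last reported;
        # push_to_placement(provider, inventory) stands in for the real
        # Placement inventory update request.
        for provider, inventory in new_inventory.items():
            if cached.get(provider) == inventory:
                print('Inventory has not changed for provider %s' % provider)
                continue
            push_to_placement(provider, inventory)
            cached[provider] = inventory


    cache = {}
    inv = {'19ddf8be-7305-4f70-8366-52a9957232e6': {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
    }}
    sync_inventory(cache, inv, lambda p, i: print('updating %s' % p))
    sync_inventory(cache, inv, lambda p, i: print('updating %s' % p))  # skipped
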
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.216873] env[69367]: DEBUG nova.compute.manager [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Start spawning the instance on the hypervisor. {{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 612.254317] env[69367]: DEBUG nova.virt.hardware [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 612.254551] env[69367]: DEBUG nova.virt.hardware [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 612.254708] env[69367]: DEBUG nova.virt.hardware [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 612.254889] env[69367]: DEBUG nova.virt.hardware [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 612.255072] env[69367]: DEBUG nova.virt.hardware [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 612.255231] env[69367]: DEBUG nova.virt.hardware [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 612.255523] env[69367]: DEBUG nova.virt.hardware [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 612.255698] env[69367]: DEBUG nova.virt.hardware [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 612.255869] env[69367]: DEBUG nova.virt.hardware [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 612.256414] env[69367]: DEBUG nova.virt.hardware [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 612.256414] env[69367]: DEBUG nova.virt.hardware [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 612.257153] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba1ec714-1399-40a7-9206-1d4df6c3746c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.266931] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99b4bed7-9ae0-41ab-aafc-d5db61b1477b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.405914] env[69367]: DEBUG nova.network.neutron [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Successfully created port: 4051893d-10cc-4cb9-8e30-089e8d3d4286 {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 612.445482] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.312s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 612.446099] env[69367]: DEBUG nova.compute.manager [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Start building networks asynchronously for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 612.450615] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.814s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 612.454539] env[69367]: INFO nova.compute.claims [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 612.528267] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': task-4233686, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.04522} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.528997] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 612.529235] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Moving file from [datastore2] vmware_temp/e37abdf1-cc2d-4a95-a6a4-0450fc42ca0c/2b099420-9152-4d93-9609-4c9317824c11 to [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11. {{(pid=69367) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 612.529489] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-d6fc217e-20f8-48f5-b90c-3d8f19741efe {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.538936] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Waiting for the task: (returnval){ [ 612.538936] env[69367]: value = "task-4233687" [ 612.538936] env[69367]: _type = "Task" [ 612.538936] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.555011] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': task-4233687, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.961559] env[69367]: DEBUG nova.compute.utils [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 612.966989] env[69367]: DEBUG nova.compute.manager [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Not allocating networking since 'none' was specified. {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 613.053974] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': task-4233687, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.037203} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.053974] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] File moved {{(pid=69367) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 613.053974] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Cleaning up location [datastore2] vmware_temp/e37abdf1-cc2d-4a95-a6a4-0450fc42ca0c {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 613.053974] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Deleting the datastore file [datastore2] vmware_temp/e37abdf1-cc2d-4a95-a6a4-0450fc42ca0c {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 613.053974] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-547267d7-a633-4f6f-9d6d-18663a73ea4f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.066952] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Waiting for the task: (returnval){ [ 613.066952] env[69367]: value = "task-4233688" [ 613.066952] env[69367]: _type = "Task" [ 613.066952] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.077935] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': task-4233688, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.471338] env[69367]: DEBUG nova.compute.manager [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Start building block device mappings for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 613.602700] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': task-4233688, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.031258} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.608041] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 613.608041] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-faf0c901-18f8-46c3-a6ae-ad70f00f6eea {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.614312] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Waiting for the task: (returnval){ [ 613.614312] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52028e0e-dbc9-ae13-0ab6-f8d416fbf49b" [ 613.614312] env[69367]: _type = "Task" [ 613.614312] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.654878] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52028e0e-dbc9-ae13-0ab6-f8d416fbf49b, 'name': SearchDatastore_Task, 'duration_secs': 0.009574} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.656388] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 613.656388] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e/937c05e9-06f1-4a5f-9f8c-ac40c262ce4e.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 613.656388] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-96230a0f-3f32-4724-a207-90e62f5f7458 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.668814] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Waiting for the task: (returnval){ [ 613.668814] env[69367]: value = "task-4233690" [ 613.668814] env[69367]: _type = "Task" [ 613.668814] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.680138] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': task-4233690, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.859333] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b24f9c-e5d2-4127-a121-4a74e2d60816 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.874473] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fa143ed-be04-4f2a-aed4-6227eeeb0c23 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.915518] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc7925b3-892a-47ed-8730-58e53f431054 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.925374] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-411a7209-8ab0-4cd4-a0a5-04f340b5cc78 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.947212] env[69367]: DEBUG nova.compute.provider_tree [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 614.179082] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': task-4233690, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.451422] env[69367]: DEBUG nova.scheduler.client.report [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 614.485204] env[69367]: DEBUG nova.compute.manager [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Start spawning the instance on the hypervisor. 
{{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 614.524150] env[69367]: DEBUG nova.virt.hardware [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 614.524455] env[69367]: DEBUG nova.virt.hardware [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 614.524603] env[69367]: DEBUG nova.virt.hardware [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 614.524799] env[69367]: DEBUG nova.virt.hardware [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 614.524950] env[69367]: DEBUG nova.virt.hardware [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 614.525110] env[69367]: DEBUG nova.virt.hardware [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 614.525329] env[69367]: DEBUG nova.virt.hardware [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 614.525492] env[69367]: DEBUG nova.virt.hardware [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 614.525974] env[69367]: DEBUG nova.virt.hardware [None 
req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 614.525974] env[69367]: DEBUG nova.virt.hardware [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 614.525974] env[69367]: DEBUG nova.virt.hardware [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 614.530478] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7cf190b-eeee-4064-8a43-e167f36510d3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.540500] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fb561d0-7a46-42c6-a833-e11c2f6682a0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.559404] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Instance VIF info [] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 614.565875] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Creating folder: Project (2ae7c1c32dc84da9a4d60fe7cd6743a2). Parent ref: group-v837645. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 614.566396] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2b5683be-c14f-431f-a750-3d105fa8117e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.577788] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Created folder: Project (2ae7c1c32dc84da9a4d60fe7cd6743a2) in parent group-v837645. [ 614.578016] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Creating folder: Instances. Parent ref: group-v837653. 
{{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 614.579080] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-879335c7-0758-4394-8cd9-4a7e3b5c63e6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.588517] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Created folder: Instances in parent group-v837653. [ 614.588794] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 614.589025] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 614.589241] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1b0c1581-dfeb-452e-86c2-d2cb3d792070 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.610920] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 614.610920] env[69367]: value = "task-4233693" [ 614.610920] env[69367]: _type = "Task" [ 614.610920] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.621162] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233693, 'name': CreateVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.684459] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': task-4233690, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.523084} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.684782] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e/937c05e9-06f1-4a5f-9f8c-ac40c262ce4e.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 614.684782] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 614.685014] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d75a2791-c64b-4400-94b3-bc73362c1454 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.695389] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Waiting for the task: (returnval){ [ 614.695389] env[69367]: value = "task-4233694" [ 614.695389] env[69367]: _type = "Task" [ 614.695389] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.705584] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': task-4233694, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.959424] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.509s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 614.959978] env[69367]: DEBUG nova.compute.manager [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Start building networks asynchronously for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 614.966235] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.026s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 614.967783] env[69367]: INFO nova.compute.claims [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 615.131792] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233693, 'name': CreateVM_Task, 'duration_secs': 0.345711} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.132111] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 615.133011] env[69367]: DEBUG oslo_vmware.service [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0babca3-8812-42b8-9ca7-af3287882122 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.143427] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.143596] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 615.144660] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 615.144660] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50bfddd8-e8da-4d7a-a547-13d2c8de1f27 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.149384] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Waiting for the task: (returnval){ [ 615.149384] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52860f56-7ebc-8bcd-5cb6-a2ac632db7b7" [ 
615.149384] env[69367]: _type = "Task" [ 615.149384] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.161568] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52860f56-7ebc-8bcd-5cb6-a2ac632db7b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.211361] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': task-4233694, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075535} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.212293] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 615.213400] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0910ad2-08c2-47bd-9247-739f8e9eaf2e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.236233] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Reconfiguring VM instance instance-00000001 to attach disk [datastore2] 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e/937c05e9-06f1-4a5f-9f8c-ac40c262ce4e.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 615.236549] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fac729ee-b11e-4c15-84eb-c3234c4055e4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.256889] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Waiting for the task: (returnval){ [ 615.256889] env[69367]: value = "task-4233696" [ 615.256889] env[69367]: _type = "Task" [ 615.256889] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.266255] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': task-4233696, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.471970] env[69367]: DEBUG nova.compute.utils [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 615.475911] env[69367]: DEBUG nova.compute.manager [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Not allocating networking since 'none' was specified. {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 615.665404] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 615.665678] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 615.665919] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.666077] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 615.666273] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 615.666559] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c8d1f3b5-cba4-4b7d-b9f9-cc40f5b9f346 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.680334] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 615.680334] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 
tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 615.681419] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cadf2131-12c9-45f2-960e-ecf5a43b1efd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.689059] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57f602cf-053b-4e90-8e13-015cd9ec76fc {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.696626] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Waiting for the task: (returnval){ [ 615.696626] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]523303d1-1d51-7c32-c781-5bb6d016aef9" [ 615.696626] env[69367]: _type = "Task" [ 615.696626] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.708108] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]523303d1-1d51-7c32-c781-5bb6d016aef9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.770756] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': task-4233696, 'name': ReconfigVM_Task, 'duration_secs': 0.315613} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.771070] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Reconfigured VM instance instance-00000001 to attach disk [datastore2] 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e/937c05e9-06f1-4a5f-9f8c-ac40c262ce4e.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 615.771791] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dc77c651-b950-40ac-bd58-4d95e6177ea3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.780144] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Waiting for the task: (returnval){ [ 615.780144] env[69367]: value = "task-4233697" [ 615.780144] env[69367]: _type = "Task" [ 615.780144] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.790159] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': task-4233697, 'name': Rename_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.822127] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 tempest-VolumesAssistedSnapshotsTest-171649522-project-member] Acquiring lock "5341066e-fb7d-4951-935e-6188442981a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 615.822367] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 tempest-VolumesAssistedSnapshotsTest-171649522-project-member] Lock "5341066e-fb7d-4951-935e-6188442981a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 615.924077] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] Acquiring lock "7a13d45a-1941-4caf-a510-34b11d78b5e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 615.924302] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] Lock "7a13d45a-1941-4caf-a510-34b11d78b5e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 615.978845] env[69367]: DEBUG nova.compute.manager [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Start building block device mappings for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 616.212985] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Preparing fetch location {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 616.213682] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Creating directory with path [datastore1] vmware_temp/56df4b53-5c45-47de-94ef-170a9e59f915/2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 616.219400] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-512db027-3074-405a-8929-f903cd829a19 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.237696] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Created directory with path [datastore1] vmware_temp/56df4b53-5c45-47de-94ef-170a9e59f915/2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 616.237916] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Fetch image to [datastore1] vmware_temp/56df4b53-5c45-47de-94ef-170a9e59f915/2b099420-9152-4d93-9609-4c9317824c11/tmp-sparse.vmdk {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 616.238248] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Downloading image file data 2b099420-9152-4d93-9609-4c9317824c11 to [datastore1] vmware_temp/56df4b53-5c45-47de-94ef-170a9e59f915/2b099420-9152-4d93-9609-4c9317824c11/tmp-sparse.vmdk on the data store datastore1 {{(pid=69367) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 616.239117] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e10e69-7ebb-4f79-9828-024c30228b7a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.251999] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c9a97f4-69fe-487b-8cf0-102352859858 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.259411] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26cb15d1-ab47-4109-8bf8-035cbd2ab233 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.275712] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a83cc071-8522-479b-bbd9-04d6b8c32d81 {{(pid=69367) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.284135] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2460185b-21c8-4f3d-8093-91b861823fc7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.350037] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86252697-1769-4608-bd0f-b597506fa3f6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.352986] env[69367]: DEBUG nova.compute.manager [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 tempest-VolumesAssistedSnapshotsTest-171649522-project-member] [instance: 5341066e-fb7d-4951-935e-6188442981a5] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 616.360559] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a843ae20-414f-4984-81ab-992d313dd6f6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.379015] env[69367]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e3b365b9-e00b-4418-9015-d3283effcf4c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.383019] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': task-4233697, 'name': Rename_Task, 'duration_secs': 0.175523} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.383019] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e40afc95-ee07-4b59-984a-79fc49c7a7c3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.387816] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 616.387816] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f304bc81-676d-4a5f-9d3a-5500cf6aceec {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.402617] env[69367]: DEBUG nova.compute.provider_tree [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 616.406308] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Waiting for the task: (returnval){ [ 616.406308] env[69367]: value = "task-4233698" [ 616.406308] env[69367]: _type = "Task" [ 616.406308] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.414330] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': task-4233698, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.427284] env[69367]: DEBUG nova.compute.manager [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 616.433383] env[69367]: DEBUG nova.virt.vmwareapi.images [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Downloading image file data 2b099420-9152-4d93-9609-4c9317824c11 to the data store datastore1 {{(pid=69367) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 616.510463] env[69367]: DEBUG oslo_vmware.rw_handles [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/56df4b53-5c45-47de-94ef-170a9e59f915/2b099420-9152-4d93-9609-4c9317824c11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69367) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 616.693428] env[69367]: DEBUG nova.network.neutron [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Successfully updated port: 4051893d-10cc-4cb9-8e30-089e8d3d4286 {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 616.763532] env[69367]: DEBUG nova.network.neutron [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Successfully updated port: c1cd0433-e331-4e76-af42-c5cd0421b041 {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 616.810421] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] Acquiring lock "f11c0d77-b53c-4d96-820d-bd3ff3a08955" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.810588] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] Lock "f11c0d77-b53c-4d96-820d-bd3ff3a08955" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 616.893858] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 tempest-VolumesAssistedSnapshotsTest-171649522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.908457] env[69367]: DEBUG nova.scheduler.client.report [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 616.929979] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': task-4233698, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.958276] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.997389] env[69367]: DEBUG nova.compute.manager [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Start spawning the instance on the hypervisor. {{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 617.041492] env[69367]: DEBUG nova.virt.hardware [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 617.041770] env[69367]: DEBUG nova.virt.hardware [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 617.041945] env[69367]: DEBUG nova.virt.hardware [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 617.043039] env[69367]: DEBUG nova.virt.hardware [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 617.043039] env[69367]: DEBUG nova.virt.hardware [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 617.043039] env[69367]: DEBUG nova.virt.hardware [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 617.043039] env[69367]: 
DEBUG nova.virt.hardware [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 617.043216] env[69367]: DEBUG nova.virt.hardware [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 617.043397] env[69367]: DEBUG nova.virt.hardware [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 617.043437] env[69367]: DEBUG nova.virt.hardware [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 617.044802] env[69367]: DEBUG nova.virt.hardware [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 617.044802] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d35a46b-13c9-478a-865c-a0f62eb8d25a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.062844] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c2e067-39ae-40f5-9afa-dab62bb7df9f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.082907] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Instance VIF info [] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 617.094240] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Creating folder: Project (cf1defcf82d646c8aa2c0111cbaa13e6). Parent ref: group-v837645. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 617.098603] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-979ede05-caab-4c05-962c-31e9bf4c05ac {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.118025] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Created folder: Project (cf1defcf82d646c8aa2c0111cbaa13e6) in parent group-v837645. 
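Editor's note: the surrounding entries show the driver invoking vCenter task methods (PowerOnVM_Task, CreateVM_Task) and then polling them to completion ("progress is 33%/66%"). Below is a minimal sketch of that oslo.vmware session-plus-polling pattern; the host, credentials, and the vm_ref managed-object reference are assumptions supplied by the caller, not values taken from this log, and the function name is illustrative.

```python
# Sketch of the task-invoke-and-poll pattern seen in the nearby log entries.
from oslo_vmware import api


def power_on_vm(host, user, password, vm_ref):
    # Open an API session against vCenter (retry count / poll interval are
    # illustrative values, not the deployment's configuration).
    session = api.VMwareAPISession(host, user, password,
                                   api_retry_count=3,
                                   task_poll_interval=0.5)
    try:
        # Invoke VirtualMachine.PowerOnVM_Task, as in the log above.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task polls the task (the "progress is ..." lines) and
        # returns the task info once vCenter reports success, raising on error.
        return session.wait_for_task(task)
    finally:
        session.logout()
```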
[ 617.118099] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Creating folder: Instances. Parent ref: group-v837656. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 617.118448] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0f89952f-487a-43e5-abe4-1bbb23a219b2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.130118] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Created folder: Instances in parent group-v837656. [ 617.130542] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 617.131158] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 617.131408] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9c10e2c5-1d99-43b8-ad3e-0fda0ff9fa40 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.159412] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 617.159412] env[69367]: value = "task-4233701" [ 617.159412] env[69367]: _type = "Task" [ 617.159412] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.170034] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233701, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.200557] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Acquiring lock "refresh_cache-5c7b2127-e875-4222-8148-a2ea60631c25" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.200839] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Acquired lock "refresh_cache-5c7b2127-e875-4222-8148-a2ea60631c25" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 617.201128] env[69367]: DEBUG nova.network.neutron [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 617.266943] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Acquiring lock "refresh_cache-1302cad6-55b7-4905-92c1-dfdd37042e30" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.271620] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Acquired lock "refresh_cache-1302cad6-55b7-4905-92c1-dfdd37042e30" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 617.271620] env[69367]: DEBUG nova.network.neutron [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 617.428124] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.462s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 617.428750] env[69367]: DEBUG nova.compute.manager [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Start building networks asynchronously for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 617.432300] env[69367]: DEBUG oslo_vmware.api [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': task-4233698, 'name': PowerOnVM_Task, 'duration_secs': 0.580481} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.439060] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 9.829s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 617.439285] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 617.439448] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69367) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 617.439799] env[69367]: DEBUG oslo_concurrency.lockutils [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.586s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 617.443192] env[69367]: INFO nova.compute.claims [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 617.447276] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 617.447276] env[69367]: INFO nova.compute.manager [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Took 9.93 seconds to spawn the instance on the hypervisor. 
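Editor's note: the 'Lock "compute_resources" acquired/released ... waited/held' entries above come from serializing resource-tracker work on a named semaphore. A minimal sketch of that oslo.concurrency pattern follows; the function and its tracker/instance arguments are illustrative placeholders, not Nova's actual resource-tracker code.

```python
# Sketch of the named-lock pattern behind the "compute_resources" entries.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources', fair=True)
def instance_claim(tracker, instance):
    # Runs with the semaphore held; the DEBUG lines in the log record how
    # long each caller waited for this lock and how long it was held.
    return tracker.claim(instance)
```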
[ 617.451036] env[69367]: DEBUG nova.compute.manager [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 617.451036] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b26fb7-a7c5-4a2d-a36d-19b8808f64b0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.453849] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb1dccba-22ce-4e91-8d43-1e947eee66ff {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.458370] env[69367]: DEBUG oslo_vmware.rw_handles [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Completed reading data from the image iterator. {{(pid=69367) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 617.458370] env[69367]: DEBUG oslo_vmware.rw_handles [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/56df4b53-5c45-47de-94ef-170a9e59f915/2b099420-9152-4d93-9609-4c9317824c11/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69367) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 617.467197] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-882206b3-a7a0-48db-b6b5-906e57343fac {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.491488] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6132eb8c-6c3c-4217-91ba-50c53aaa9e15 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.506675] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97485089-8ada-4518-a881-4ff36e8448ad {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.552418] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180767MB free_disk=1GB free_vcpus=48 pci_devices=None {{(pid=69367) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 617.552418] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 617.553344] env[69367]: DEBUG nova.virt.vmwareapi.images [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Downloaded 
image file data 2b099420-9152-4d93-9609-4c9317824c11 to vmware_temp/56df4b53-5c45-47de-94ef-170a9e59f915/2b099420-9152-4d93-9609-4c9317824c11/tmp-sparse.vmdk on the data store datastore1 {{(pid=69367) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 617.557051] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Caching image {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 617.557051] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Copying Virtual Disk [datastore1] vmware_temp/56df4b53-5c45-47de-94ef-170a9e59f915/2b099420-9152-4d93-9609-4c9317824c11/tmp-sparse.vmdk to [datastore1] vmware_temp/56df4b53-5c45-47de-94ef-170a9e59f915/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 617.557051] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0a0773c3-ec34-4236-8f04-199fe858d698 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.566668] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Waiting for the task: (returnval){ [ 617.566668] env[69367]: value = "task-4233703" [ 617.566668] env[69367]: _type = "Task" [ 617.566668] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.583547] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233703, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.675140] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233701, 'name': CreateVM_Task, 'duration_secs': 0.433397} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.675140] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 617.675140] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.675140] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 617.675140] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 617.675140] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8e89f54-d1b8-4a03-8f56-7083f788aea0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.680600] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Waiting for the task: (returnval){ [ 617.680600] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52e6a6dc-ba3a-4e1e-216e-b9ceca5fbcb3" [ 617.680600] env[69367]: _type = "Task" [ 617.680600] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.692253] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52e6a6dc-ba3a-4e1e-216e-b9ceca5fbcb3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.869295] env[69367]: DEBUG nova.network.neutron [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 617.955423] env[69367]: DEBUG nova.compute.utils [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 617.959513] env[69367]: DEBUG nova.compute.manager [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Allocating IP information in the background. {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 617.960075] env[69367]: DEBUG nova.network.neutron [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 617.984967] env[69367]: INFO nova.compute.manager [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Took 14.83 seconds to build instance. [ 618.084747] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233703, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.086435] env[69367]: DEBUG nova.network.neutron [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 618.194708] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 618.195143] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 618.195426] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.352498] env[69367]: DEBUG nova.policy [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2957aecc3e2f49019e509ae1d92038be', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2375d6603eef45069be4a3541519002a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 618.467618] env[69367]: DEBUG nova.compute.manager [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Start building block device mappings for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 618.487219] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d88b232-94e5-4d08-bc7b-eb98e3ede295 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Lock "937c05e9-06f1-4a5f-9f8c-ac40c262ce4e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.343s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 618.532558] env[69367]: DEBUG nova.network.neutron [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Updating instance_info_cache with network_info: [{"id": "c1cd0433-e331-4e76-af42-c5cd0421b041", "address": "fa:16:3e:b1:48:3d", "network": {"id": "62ab7c93-8b0d-49ed-aa82-3bc315e190df", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.26", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cd7c200d5cd6461fb951580f8c764c42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1cd0433-e3", "ovs_interfaceid": "c1cd0433-e331-4e76-af42-c5cd0421b041", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.586969] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233703, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.722962} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.586969] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Copied Virtual Disk [datastore1] vmware_temp/56df4b53-5c45-47de-94ef-170a9e59f915/2b099420-9152-4d93-9609-4c9317824c11/tmp-sparse.vmdk to [datastore1] vmware_temp/56df4b53-5c45-47de-94ef-170a9e59f915/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 618.586969] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Deleting the datastore file [datastore1] vmware_temp/56df4b53-5c45-47de-94ef-170a9e59f915/2b099420-9152-4d93-9609-4c9317824c11/tmp-sparse.vmdk {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 618.586969] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-663c31d0-8347-4dc0-97ee-d10cc85ff70d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.596962] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Waiting for the task: (returnval){ [ 618.596962] env[69367]: value = "task-4233704" [ 618.596962] env[69367]: _type = "Task" [ 618.596962] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.613583] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233704, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.771941] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585fdfac-6ebb-41db-94d9-1c5c0fbd2cb6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.780967] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95e17b6-9922-4a4e-80f9-8420a091554e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.821847] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c77402-ebcc-4be4-8ec5-deb274ab7b82 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.829666] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad48e68-9f5e-4a8b-94c9-88f7a1cef3b6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.849579] env[69367]: DEBUG nova.compute.provider_tree [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 618.852071] env[69367]: DEBUG nova.network.neutron [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Updating instance_info_cache with network_info: [{"id": "4051893d-10cc-4cb9-8e30-089e8d3d4286", "address": "fa:16:3e:a8:2e:46", "network": {"id": "62ab7c93-8b0d-49ed-aa82-3bc315e190df", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.214", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cd7c200d5cd6461fb951580f8c764c42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4051893d-10", "ovs_interfaceid": "4051893d-10cc-4cb9-8e30-089e8d3d4286", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.990817] env[69367]: DEBUG nova.compute.manager [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 619.034604] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Releasing lock "refresh_cache-1302cad6-55b7-4905-92c1-dfdd37042e30" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 619.035010] env[69367]: DEBUG nova.compute.manager [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Instance network_info: |[{"id": "c1cd0433-e331-4e76-af42-c5cd0421b041", "address": "fa:16:3e:b1:48:3d", "network": {"id": "62ab7c93-8b0d-49ed-aa82-3bc315e190df", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.26", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cd7c200d5cd6461fb951580f8c764c42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1cd0433-e3", "ovs_interfaceid": "c1cd0433-e331-4e76-af42-c5cd0421b041", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 619.035576] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:48:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb2eaecd-9701-4504-9fcb-fb1a420ead72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c1cd0433-e331-4e76-af42-c5cd0421b041', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 619.047089] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Creating folder: Project (f78c11c8a4c848e29c847f5e0d5b55f5). Parent ref: group-v837645. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 619.048395] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bbb87c05-ee3d-43df-af2a-624b7b328cec {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.062759] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Created folder: Project (f78c11c8a4c848e29c847f5e0d5b55f5) in parent group-v837645. 
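Editor's note: the "Instance VIF info" entry above is derived from the neutron network_info shown just before it. The helper below is an illustrative reconstruction (not Nova's actual code) of that mapping for an NSX-backed OpaqueNetwork port; the field values in the comments are the ones visible in the log.

```python
# Sketch: map one neutron network_info entry to the VIF info dict logged above.
def vif_info_from_network_info(vif):
    details = vif.get('details', {})
    return {
        'network_name': vif['network']['bridge'],        # 'br-int'
        'mac_address': vif['address'],                    # e.g. 'fa:16:3e:b1:48:3d'
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': details.get('nsx-logical-switch-id'),
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': vif['id'],
        'vif_model': 'vmxnet3',                           # per the image name in the log
    }
```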
[ 619.063020] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Creating folder: Instances. Parent ref: group-v837659. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 619.063270] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-92a26bcb-be4e-4b06-83d3-16c0e4dff551 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.073839] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Created folder: Instances in parent group-v837659. [ 619.074127] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 619.074358] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 619.074565] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-884836bd-71ce-43ea-b8b8-8bab72747585 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.107518] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233704, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.02944} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.109805] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 619.109805] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Moving file from [datastore1] vmware_temp/56df4b53-5c45-47de-94ef-170a9e59f915/2b099420-9152-4d93-9609-4c9317824c11 to [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11. {{(pid=69367) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 619.110074] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 619.110074] env[69367]: value = "task-4233707" [ 619.110074] env[69367]: _type = "Task" [ 619.110074] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.111127] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-bec176bb-7e0c-4175-ba72-ce2513510d9e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.123034] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233707, 'name': CreateVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.124945] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Waiting for the task: (returnval){ [ 619.124945] env[69367]: value = "task-4233708" [ 619.124945] env[69367]: _type = "Task" [ 619.124945] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.137634] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233708, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.356221] env[69367]: DEBUG nova.scheduler.client.report [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 619.361463] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Releasing lock "refresh_cache-5c7b2127-e875-4222-8148-a2ea60631c25" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 619.361844] env[69367]: DEBUG nova.compute.manager [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Instance network_info: |[{"id": "4051893d-10cc-4cb9-8e30-089e8d3d4286", "address": "fa:16:3e:a8:2e:46", "network": {"id": "62ab7c93-8b0d-49ed-aa82-3bc315e190df", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.214", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cd7c200d5cd6461fb951580f8c764c42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4051893d-10", "ovs_interfaceid": "4051893d-10cc-4cb9-8e30-089e8d3d4286", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 619.363040] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:2e:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb2eaecd-9701-4504-9fcb-fb1a420ead72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4051893d-10cc-4cb9-8e30-089e8d3d4286', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 619.373673] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Creating folder: Project (25daec9a55d94273bf3088110afe7705). Parent ref: group-v837645. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 619.373773] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d17c3619-b34a-4700-8552-eafb95280368 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.390939] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Created folder: Project (25daec9a55d94273bf3088110afe7705) in parent group-v837645. [ 619.390939] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Creating folder: Instances. Parent ref: group-v837662. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 619.392812] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eb507053-8eca-4532-9531-574b6984a533 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.404947] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Created folder: Instances in parent group-v837662. [ 619.404947] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 619.404947] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 619.404947] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-90a44dc0-22f3-4e40-9540-3c129aeae2bf {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.434133] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 619.434133] env[69367]: value = "task-4233711" [ 619.434133] env[69367]: _type = "Task" [ 619.434133] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.444787] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233711, 'name': CreateVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.482978] env[69367]: DEBUG nova.compute.manager [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Start spawning the instance on the hypervisor. {{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 619.528832] env[69367]: DEBUG nova.virt.hardware [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 619.528832] env[69367]: DEBUG nova.virt.hardware [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 619.529098] env[69367]: DEBUG nova.virt.hardware [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 619.529098] env[69367]: DEBUG nova.virt.hardware [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 619.529436] env[69367]: DEBUG nova.virt.hardware 
[None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 619.529521] env[69367]: DEBUG nova.virt.hardware [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 619.529821] env[69367]: DEBUG nova.virt.hardware [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 619.530021] env[69367]: DEBUG nova.virt.hardware [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 619.530335] env[69367]: DEBUG nova.virt.hardware [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 619.530432] env[69367]: DEBUG nova.virt.hardware [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 619.530592] env[69367]: DEBUG nova.virt.hardware [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 619.531636] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb696d9-9a9f-4738-8193-ffe7958276bd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.542685] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 619.544158] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] Acquiring lock "07a65426-e348-4f6f-8898-45409e15c554" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
619.544425] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] Lock "07a65426-e348-4f6f-8898-45409e15c554" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 619.550326] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc323c7-fc39-4f05-bdc9-b83655f6980d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.558723] env[69367]: DEBUG nova.compute.manager [req-0b6eade3-d9ba-46f3-abd8-3714336d71d4 req-d905a5d0-a429-462b-b468-ff1547acb146 service nova] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Received event network-vif-plugged-4051893d-10cc-4cb9-8e30-089e8d3d4286 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 619.559009] env[69367]: DEBUG oslo_concurrency.lockutils [req-0b6eade3-d9ba-46f3-abd8-3714336d71d4 req-d905a5d0-a429-462b-b468-ff1547acb146 service nova] Acquiring lock "5c7b2127-e875-4222-8148-a2ea60631c25-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 619.559280] env[69367]: DEBUG oslo_concurrency.lockutils [req-0b6eade3-d9ba-46f3-abd8-3714336d71d4 req-d905a5d0-a429-462b-b468-ff1547acb146 service nova] Lock "5c7b2127-e875-4222-8148-a2ea60631c25-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 619.559487] env[69367]: DEBUG oslo_concurrency.lockutils [req-0b6eade3-d9ba-46f3-abd8-3714336d71d4 req-d905a5d0-a429-462b-b468-ff1547acb146 service nova] Lock "5c7b2127-e875-4222-8148-a2ea60631c25-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 619.559660] env[69367]: DEBUG nova.compute.manager [req-0b6eade3-d9ba-46f3-abd8-3714336d71d4 req-d905a5d0-a429-462b-b468-ff1547acb146 service nova] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] No waiting events found dispatching network-vif-plugged-4051893d-10cc-4cb9-8e30-089e8d3d4286 {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 619.559865] env[69367]: WARNING nova.compute.manager [req-0b6eade3-d9ba-46f3-abd8-3714336d71d4 req-d905a5d0-a429-462b-b468-ff1547acb146 service nova] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Received unexpected event network-vif-plugged-4051893d-10cc-4cb9-8e30-089e8d3d4286 for instance with vm_state building and task_state spawning. [ 619.624079] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233707, 'name': CreateVM_Task, 'duration_secs': 0.466533} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.624284] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 619.635481] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233708, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.027708} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.635741] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] File moved {{(pid=69367) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 619.636196] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Cleaning up location [datastore1] vmware_temp/56df4b53-5c45-47de-94ef-170a9e59f915 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 619.636381] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Deleting the datastore file [datastore1] vmware_temp/56df4b53-5c45-47de-94ef-170a9e59f915 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 619.636639] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9c97f900-ee7b-4453-8778-3d008fb7d2b1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.645571] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Waiting for the task: (returnval){ [ 619.645571] env[69367]: value = "task-4233712" [ 619.645571] env[69367]: _type = "Task" [ 619.645571] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.654975] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233712, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.656580] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.656580] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 619.656817] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 619.657124] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30ee5356-9139-4bd4-aa3b-70e3f9bd53d8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.663193] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Waiting for the task: (returnval){ [ 619.663193] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52a89aff-7553-40b5-2e3b-99b764dd62ec" [ 619.663193] env[69367]: _type = "Task" [ 619.663193] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.674617] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52a89aff-7553-40b5-2e3b-99b764dd62ec, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.806257] env[69367]: DEBUG nova.network.neutron [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Successfully created port: 013efad5-0b57-43e9-b662-10e31d24d8af {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 619.864576] env[69367]: DEBUG oslo_concurrency.lockutils [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.425s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 619.865266] env[69367]: DEBUG nova.compute.manager [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Start building networks asynchronously for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 619.872373] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.749s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 619.874527] env[69367]: INFO nova.compute.claims [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 619.900225] env[69367]: DEBUG nova.compute.manager [req-24249dc7-8755-423c-9b54-0d95b08e82e9 req-2afd57f5-0bfc-4a2e-993f-34743edde80d service nova] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Received event network-vif-plugged-c1cd0433-e331-4e76-af42-c5cd0421b041 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 619.900685] env[69367]: DEBUG oslo_concurrency.lockutils [req-24249dc7-8755-423c-9b54-0d95b08e82e9 req-2afd57f5-0bfc-4a2e-993f-34743edde80d service nova] Acquiring lock "1302cad6-55b7-4905-92c1-dfdd37042e30-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 619.901058] env[69367]: DEBUG oslo_concurrency.lockutils [req-24249dc7-8755-423c-9b54-0d95b08e82e9 req-2afd57f5-0bfc-4a2e-993f-34743edde80d service nova] Lock "1302cad6-55b7-4905-92c1-dfdd37042e30-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 619.901434] env[69367]: DEBUG oslo_concurrency.lockutils [req-24249dc7-8755-423c-9b54-0d95b08e82e9 req-2afd57f5-0bfc-4a2e-993f-34743edde80d service nova] Lock "1302cad6-55b7-4905-92c1-dfdd37042e30-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 619.901918] env[69367]: DEBUG nova.compute.manager [req-24249dc7-8755-423c-9b54-0d95b08e82e9 req-2afd57f5-0bfc-4a2e-993f-34743edde80d service nova] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] No waiting events found dispatching network-vif-plugged-c1cd0433-e331-4e76-af42-c5cd0421b041 {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 619.902099] env[69367]: WARNING nova.compute.manager [req-24249dc7-8755-423c-9b54-0d95b08e82e9 req-2afd57f5-0bfc-4a2e-993f-34743edde80d service nova] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Received unexpected event network-vif-plugged-c1cd0433-e331-4e76-af42-c5cd0421b041 for instance with vm_state building and task_state spawning. [ 619.956546] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233711, 'name': CreateVM_Task} progress is 99%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.163944] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233712, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.028519} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.163944] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 620.163944] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f2392e8-c0e6-4ff2-8e2a-d069d2ada5a2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.175969] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Waiting for the task: (returnval){ [ 620.175969] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52fa7197-8f57-8a73-489b-f4aaa3b0ca42" [ 620.175969] env[69367]: _type = "Task" [ 620.175969] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.180775] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52a89aff-7553-40b5-2e3b-99b764dd62ec, 'name': SearchDatastore_Task, 'duration_secs': 0.028941} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.186097] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 620.186670] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 620.186803] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.186949] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 620.187133] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 620.187979] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1cfea865-0160-408d-8d48-15963c50fc45 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.200964] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52fa7197-8f57-8a73-489b-f4aaa3b0ca42, 'name': SearchDatastore_Task, 'duration_secs': 0.010098} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.201737] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 620.202103] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] a358ce6d-9826-4ddb-8c2f-51bac8db59d4/a358ce6d-9826-4ddb-8c2f-51bac8db59d4.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 620.202404] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 620.202562] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 620.205251] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 620.205251] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 620.205446] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fe62f9eb-0d81-433a-8909-94efa3f175ac {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.207445] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25fea77b-028c-4f23-aa87-a62fc9e85830 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.211714] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6909c32c-3e58-4da9-a9bb-bab0c3f2774b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.220078] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Waiting for the 
task: (returnval){ [ 620.220078] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52ec69aa-d5c2-074d-433c-3b7da996b3f3" [ 620.220078] env[69367]: _type = "Task" [ 620.220078] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.221576] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Waiting for the task: (returnval){ [ 620.221576] env[69367]: value = "task-4233713" [ 620.221576] env[69367]: _type = "Task" [ 620.221576] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.230804] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 620.230971] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 620.232240] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55191f2d-cd6f-49ff-8f1b-7f864be77a81 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.238595] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52ec69aa-d5c2-074d-433c-3b7da996b3f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.242889] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233713, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.244287] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Waiting for the task: (returnval){ [ 620.244287] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52349199-44f3-1c62-7284-0a94b79c2264" [ 620.244287] env[69367]: _type = "Task" [ 620.244287] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.253516] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52349199-44f3-1c62-7284-0a94b79c2264, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.382376] env[69367]: DEBUG nova.compute.utils [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 620.384612] env[69367]: DEBUG nova.compute.manager [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Allocating IP information in the background. {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 620.384612] env[69367]: DEBUG nova.network.neutron [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 620.448742] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233711, 'name': CreateVM_Task, 'duration_secs': 0.553007} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.448986] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 620.449877] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.450086] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 620.450446] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 620.450754] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f2d3df1-0442-424c-b60d-9d2ee9c82a10 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.458845] env[69367]: DEBUG oslo_vmware.api [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Waiting for the task: (returnval){ [ 620.458845] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52f43acd-a79d-c7c7-4648-8a9653d2f938" [ 620.458845] env[69367]: _type = "Task" [ 620.458845] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.470892] env[69367]: DEBUG oslo_vmware.api [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52f43acd-a79d-c7c7-4648-8a9653d2f938, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.473698] env[69367]: DEBUG nova.compute.manager [None req-a3eb0a0b-3bec-4647-bad4-66c1d4072e42 tempest-ServerDiagnosticsV248Test-1087128128 tempest-ServerDiagnosticsV248Test-1087128128-project-admin] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 620.475472] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3157c85d-bc98-4d3d-9685-f9f8ec7d326e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.490042] env[69367]: INFO nova.compute.manager [None req-a3eb0a0b-3bec-4647-bad4-66c1d4072e42 tempest-ServerDiagnosticsV248Test-1087128128 tempest-ServerDiagnosticsV248Test-1087128128-project-admin] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Retrieving diagnostics [ 620.490042] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99df129b-a38f-4f8c-ad19-3490da299478 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.738631] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52ec69aa-d5c2-074d-433c-3b7da996b3f3, 'name': SearchDatastore_Task, 'duration_secs': 0.022931} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.745125] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233713, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503857} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.745660] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48259021-d9cb-4203-8ebf-4b5193bb1699 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.751596] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] a358ce6d-9826-4ddb-8c2f-51bac8db59d4/a358ce6d-9826-4ddb-8c2f-51bac8db59d4.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 620.751810] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 620.755481] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b1039340-d13c-43ec-9ce0-25877fa5f52d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.762859] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Waiting for the task: (returnval){ [ 620.762859] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52d29f67-a403-00f6-fd44-f229df6ff80b" [ 620.762859] env[69367]: _type = "Task" [ 620.762859] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.771736] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52349199-44f3-1c62-7284-0a94b79c2264, 'name': SearchDatastore_Task, 'duration_secs': 0.009859} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.777247] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Waiting for the task: (returnval){ [ 620.777247] env[69367]: value = "task-4233714" [ 620.777247] env[69367]: _type = "Task" [ 620.777247] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.777548] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d90a41f3-8fad-47f4-a1a4-b4f2c7c5796b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.787839] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52d29f67-a403-00f6-fd44-f229df6ff80b, 'name': SearchDatastore_Task, 'duration_secs': 0.009701} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.788550] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 620.788683] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 1302cad6-55b7-4905-92c1-dfdd37042e30/1302cad6-55b7-4905-92c1-dfdd37042e30.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 620.788949] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d79fd3f6-b3c3-45b2-a754-949e37e946e5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.795821] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233714, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.796336] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Waiting for the task: (returnval){ [ 620.796336] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52c1bbda-d0c8-ffc7-f85d-15ae19dbda2a" [ 620.796336] env[69367]: _type = "Task" [ 620.796336] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.802068] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Waiting for the task: (returnval){ [ 620.802068] env[69367]: value = "task-4233715" [ 620.802068] env[69367]: _type = "Task" [ 620.802068] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.810915] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52c1bbda-d0c8-ffc7-f85d-15ae19dbda2a, 'name': SearchDatastore_Task, 'duration_secs': 0.012417} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.811918] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 620.812463] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] 4e346ed1-36e9-421d-975f-e8bb6f05c0a0/4e346ed1-36e9-421d-975f-e8bb6f05c0a0.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 620.812902] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-171ce5c9-223b-4cc6-bf97-e644768ab479 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.819207] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Task: {'id': task-4233715, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.822793] env[69367]: DEBUG nova.policy [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '912c460bfb3f4fd58d74d7e24d6e6f23', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '06c46b0af1af4a788c5e7159fc2daa3d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 620.826535] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Waiting for the task: (returnval){ [ 620.826535] env[69367]: value = "task-4233716" [ 620.826535] env[69367]: _type = "Task" [ 620.826535] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.836071] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233716, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.889195] env[69367]: DEBUG nova.compute.manager [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Start building block device mappings for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 620.973280] env[69367]: DEBUG oslo_vmware.api [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52f43acd-a79d-c7c7-4648-8a9653d2f938, 'name': SearchDatastore_Task, 'duration_secs': 0.051802} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.974013] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 620.974189] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 620.974444] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.974600] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 620.975053] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 620.975668] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8da4bfd5-fb8e-4727-925e-e9a6668339dd {{(pid=69367) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.992436] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 620.992646] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 620.996550] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7b8907a-9f68-4a75-a154-25f9176c477b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.008741] env[69367]: DEBUG oslo_vmware.api [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Waiting for the task: (returnval){ [ 621.008741] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]5263d436-2ff9-a22e-a9cf-1ba0ecc89e40" [ 621.008741] env[69367]: _type = "Task" [ 621.008741] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.019670] env[69367]: DEBUG oslo_vmware.api [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5263d436-2ff9-a22e-a9cf-1ba0ecc89e40, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.154934] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc2ff691-c329-401c-b58f-c26e0960177d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.164825] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e48ba0f-83d0-4bcb-aee7-6aae84abc1f8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.205148] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e002c7ff-8088-42db-b92c-fedae8951b4f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.216844] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a18b416e-3ea8-4291-98cf-d33138d3af1d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.234810] env[69367]: DEBUG nova.compute.provider_tree [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 621.294329] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233714, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.102881} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.294329] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 621.296030] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf98f8e-31a2-4564-af51-a8b299c46c8e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.321022] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] a358ce6d-9826-4ddb-8c2f-51bac8db59d4/a358ce6d-9826-4ddb-8c2f-51bac8db59d4.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 621.326145] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-613d230d-a13c-42db-85e5-422098a60133 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.363177] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Task: {'id': task-4233715, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.364413] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Waiting for the task: (returnval){ [ 621.364413] env[69367]: value = "task-4233717" [ 621.364413] env[69367]: _type = "Task" [ 621.364413] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.374661] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233716, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.384472] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233717, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.522624] env[69367]: DEBUG oslo_vmware.api [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5263d436-2ff9-a22e-a9cf-1ba0ecc89e40, 'name': SearchDatastore_Task, 'duration_secs': 0.057768} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.523261] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6198f88f-4d44-408a-9a78-9e4a22a15631 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.531316] env[69367]: DEBUG oslo_vmware.api [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Waiting for the task: (returnval){ [ 621.531316] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]5292352d-eaec-e398-217c-e2534c3fe23b" [ 621.531316] env[69367]: _type = "Task" [ 621.531316] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.542431] env[69367]: DEBUG oslo_vmware.api [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5292352d-eaec-e398-217c-e2534c3fe23b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.739080] env[69367]: DEBUG nova.scheduler.client.report [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 621.829734] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Task: {'id': task-4233715, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.759926} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.830227] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 1302cad6-55b7-4905-92c1-dfdd37042e30/1302cad6-55b7-4905-92c1-dfdd37042e30.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 621.830572] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 621.830939] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fb8c09e7-e696-4592-a872-3552ce05ba42 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.841519] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Waiting for the task: (returnval){ [ 621.841519] env[69367]: value = "task-4233718" [ 621.841519] env[69367]: _type = "Task" [ 621.841519] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.858559] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Task: {'id': task-4233718, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.869069] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233716, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.608709} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.873536] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] 4e346ed1-36e9-421d-975f-e8bb6f05c0a0/4e346ed1-36e9-421d-975f-e8bb6f05c0a0.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 621.873536] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 621.873762] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-388ee7a0-0d8c-43fa-a5ac-7c7f0ebb5236 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.883862] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233717, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.885399] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Waiting for the task: (returnval){ [ 621.885399] env[69367]: value = "task-4233719" [ 621.885399] env[69367]: _type = "Task" [ 621.885399] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.898811] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233719, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.901190] env[69367]: DEBUG nova.compute.manager [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Start spawning the instance on the hypervisor. 
{{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 621.953693] env[69367]: DEBUG nova.virt.hardware [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 621.961056] env[69367]: DEBUG nova.virt.hardware [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 621.961056] env[69367]: DEBUG nova.virt.hardware [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 621.961056] env[69367]: DEBUG nova.virt.hardware [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 621.961056] env[69367]: DEBUG nova.virt.hardware [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 621.961056] env[69367]: DEBUG nova.virt.hardware [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 621.961466] env[69367]: DEBUG nova.virt.hardware [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 621.961466] env[69367]: DEBUG nova.virt.hardware [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 621.961466] env[69367]: DEBUG nova.virt.hardware [None 
req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 621.961466] env[69367]: DEBUG nova.virt.hardware [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 621.961466] env[69367]: DEBUG nova.virt.hardware [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 621.961624] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69536346-7c16-4148-b28a-61aa41d1d6b3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.973760] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9815bc82-2555-4550-b792-610b9bddbee2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.047119] env[69367]: DEBUG oslo_vmware.api [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5292352d-eaec-e398-217c-e2534c3fe23b, 'name': SearchDatastore_Task, 'duration_secs': 0.009944} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.047815] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 622.048807] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] 5c7b2127-e875-4222-8148-a2ea60631c25/5c7b2127-e875-4222-8148-a2ea60631c25.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 622.049838] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-26d25718-9158-4efe-b407-57454e4b4376 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.059415] env[69367]: DEBUG oslo_vmware.api [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Waiting for the task: (returnval){ [ 622.059415] env[69367]: value = "task-4233720" [ 622.059415] env[69367]: _type = "Task" [ 622.059415] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.074175] env[69367]: DEBUG oslo_vmware.api [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Task: {'id': task-4233720, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.245163] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.373s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 622.245749] env[69367]: DEBUG nova.compute.manager [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Start building networks asynchronously for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 622.251907] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 tempest-VolumesAssistedSnapshotsTest-171649522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.355s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.254031] env[69367]: INFO nova.compute.claims [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 tempest-VolumesAssistedSnapshotsTest-171649522-project-member] [instance: 5341066e-fb7d-4951-935e-6188442981a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 622.364340] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Task: {'id': task-4233718, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094163} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.365095] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 622.370061] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ae900b-dcc1-486c-a048-6a7a93d3ea94 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.382975] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233717, 'name': ReconfigVM_Task, 'duration_secs': 0.686615} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.392662] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Reconfigured VM instance instance-00000004 to attach disk [datastore1] a358ce6d-9826-4ddb-8c2f-51bac8db59d4/a358ce6d-9826-4ddb-8c2f-51bac8db59d4.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 622.403046] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Reconfiguring VM instance instance-00000002 to attach disk [datastore2] 1302cad6-55b7-4905-92c1-dfdd37042e30/1302cad6-55b7-4905-92c1-dfdd37042e30.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 622.403525] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7b93595f-6313-40fe-becc-1ecdd6dc24aa {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.410806] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-caef69ae-1e74-4cea-bf90-39efc96223cb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.436458] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233719, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078278} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.439200] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 622.439596] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Waiting for the task: (returnval){ [ 622.439596] env[69367]: value = "task-4233722" [ 622.439596] env[69367]: _type = "Task" [ 622.439596] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.439917] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Waiting for the task: (returnval){ [ 622.439917] env[69367]: value = "task-4233721" [ 622.439917] env[69367]: _type = "Task" [ 622.439917] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.440604] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccaa0915-77a2-4395-badb-58f606e3d6e0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.464978] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233721, 'name': Rename_Task} progress is 14%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.475687] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Task: {'id': task-4233722, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.486027] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] 4e346ed1-36e9-421d-975f-e8bb6f05c0a0/4e346ed1-36e9-421d-975f-e8bb6f05c0a0.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 622.486277] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9256375c-8c42-48c9-8b38-6fa373800a91 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.511362] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Waiting for the task: (returnval){ [ 622.511362] env[69367]: value = "task-4233723" [ 622.511362] env[69367]: _type = "Task" [ 622.511362] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.524054] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233723, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.575673] env[69367]: DEBUG oslo_vmware.api [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Task: {'id': task-4233720, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.760552] env[69367]: DEBUG nova.compute.utils [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 622.765720] env[69367]: DEBUG nova.compute.manager [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Allocating IP information in the background. {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 622.765720] env[69367]: DEBUG nova.network.neutron [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 622.879074] env[69367]: DEBUG nova.network.neutron [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Successfully created port: b6a0688d-a5a2-4937-9ac7-25b53f9b001d {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 622.933303] env[69367]: DEBUG nova.policy [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20a486bd77ec4dacbf708b2f1b2bb9ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4840471cf7844a1aac397d7ee7db12d4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 622.961332] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233721, 'name': Rename_Task, 'duration_secs': 0.190309} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.965772] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 622.966219] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Task: {'id': task-4233722, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.966536] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-10556561-8341-4beb-bcb3-726f07e7d7d5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.982506] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Waiting for the task: (returnval){ [ 622.982506] env[69367]: value = "task-4233724" [ 622.982506] env[69367]: _type = "Task" [ 622.982506] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.998210] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233724, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.024165] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233723, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.081033] env[69367]: DEBUG oslo_vmware.api [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Task: {'id': task-4233720, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.555327} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.081730] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] 5c7b2127-e875-4222-8148-a2ea60631c25/5c7b2127-e875-4222-8148-a2ea60631c25.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 623.082069] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 623.082534] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5c84d0dd-1efe-4159-b0b5-11cce3b5afd7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.093632] env[69367]: DEBUG oslo_vmware.api [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Waiting for the task: (returnval){ [ 623.093632] env[69367]: value = "task-4233725" [ 623.093632] env[69367]: _type = "Task" [ 623.093632] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.105323] env[69367]: DEBUG oslo_vmware.api [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Task: {'id': task-4233725, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.265466] env[69367]: DEBUG nova.compute.manager [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Start building block device mappings for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 623.461616] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Task: {'id': task-4233722, 'name': ReconfigVM_Task, 'duration_secs': 0.627074} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.461992] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Reconfigured VM instance instance-00000002 to attach disk [datastore2] 1302cad6-55b7-4905-92c1-dfdd37042e30/1302cad6-55b7-4905-92c1-dfdd37042e30.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 623.463014] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-86bf7d82-1dab-472b-bcf8-5fcd073c52aa {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.479163] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Waiting for the task: (returnval){ [ 623.479163] env[69367]: value = "task-4233726" [ 623.479163] env[69367]: _type = "Task" [ 623.479163] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.496913] env[69367]: DEBUG oslo_concurrency.lockutils [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] Acquiring lock "1df0055c-938e-4048-938c-37590b0138ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 623.497424] env[69367]: DEBUG oslo_concurrency.lockutils [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] Lock "1df0055c-938e-4048-938c-37590b0138ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 623.506374] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Task: {'id': task-4233726, 'name': Rename_Task} progress is 14%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.520345] env[69367]: DEBUG nova.compute.manager [req-0d179373-235b-4382-9909-dc54845809f3 req-ef12c1cd-662d-4e7a-a27c-113a707e8b48 service nova] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Received event network-changed-4051893d-10cc-4cb9-8e30-089e8d3d4286 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 623.520345] env[69367]: DEBUG nova.compute.manager [req-0d179373-235b-4382-9909-dc54845809f3 req-ef12c1cd-662d-4e7a-a27c-113a707e8b48 service nova] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Refreshing instance network info cache due to event network-changed-4051893d-10cc-4cb9-8e30-089e8d3d4286. {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 623.520345] env[69367]: DEBUG oslo_concurrency.lockutils [req-0d179373-235b-4382-9909-dc54845809f3 req-ef12c1cd-662d-4e7a-a27c-113a707e8b48 service nova] Acquiring lock "refresh_cache-5c7b2127-e875-4222-8148-a2ea60631c25" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.520345] env[69367]: DEBUG oslo_concurrency.lockutils [req-0d179373-235b-4382-9909-dc54845809f3 req-ef12c1cd-662d-4e7a-a27c-113a707e8b48 service nova] Acquired lock "refresh_cache-5c7b2127-e875-4222-8148-a2ea60631c25" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 623.520345] env[69367]: DEBUG nova.network.neutron [req-0d179373-235b-4382-9909-dc54845809f3 req-ef12c1cd-662d-4e7a-a27c-113a707e8b48 service nova] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Refreshing network info cache for port 4051893d-10cc-4cb9-8e30-089e8d3d4286 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 623.526283] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233724, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.543124] env[69367]: DEBUG nova.compute.manager [req-564381c2-259f-4051-b674-0c2e3d42b359 req-928c76ce-6fec-4af1-8bd6-c5fa8547e3a9 service nova] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Received event network-changed-c1cd0433-e331-4e76-af42-c5cd0421b041 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 623.543124] env[69367]: DEBUG nova.compute.manager [req-564381c2-259f-4051-b674-0c2e3d42b359 req-928c76ce-6fec-4af1-8bd6-c5fa8547e3a9 service nova] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Refreshing instance network info cache due to event network-changed-c1cd0433-e331-4e76-af42-c5cd0421b041. {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 623.543124] env[69367]: DEBUG oslo_concurrency.lockutils [req-564381c2-259f-4051-b674-0c2e3d42b359 req-928c76ce-6fec-4af1-8bd6-c5fa8547e3a9 service nova] Acquiring lock "refresh_cache-1302cad6-55b7-4905-92c1-dfdd37042e30" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.543124] env[69367]: DEBUG oslo_concurrency.lockutils [req-564381c2-259f-4051-b674-0c2e3d42b359 req-928c76ce-6fec-4af1-8bd6-c5fa8547e3a9 service nova] Acquired lock "refresh_cache-1302cad6-55b7-4905-92c1-dfdd37042e30" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 623.543124] env[69367]: DEBUG nova.network.neutron [req-564381c2-259f-4051-b674-0c2e3d42b359 req-928c76ce-6fec-4af1-8bd6-c5fa8547e3a9 service nova] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Refreshing network info cache for port c1cd0433-e331-4e76-af42-c5cd0421b041 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 623.550587] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233723, 'name': ReconfigVM_Task, 'duration_secs': 0.708236} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.551400] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Reconfigured VM instance instance-00000005 to attach disk [datastore1] 4e346ed1-36e9-421d-975f-e8bb6f05c0a0/4e346ed1-36e9-421d-975f-e8bb6f05c0a0.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 623.552236] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e62eceab-51e6-4ebf-b7f9-2589675eab27 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.561090] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Waiting for the task: (returnval){ [ 623.561090] env[69367]: value = "task-4233727" [ 623.561090] env[69367]: _type = "Task" [ 623.561090] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.578263] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233727, 'name': Rename_Task} progress is 6%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.587376] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f247fd4-1d31-4e1b-a7ee-d0f30243780e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.601860] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32ff4064-fd22-43e3-815e-f34a2cf05733 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.613747] env[69367]: DEBUG oslo_vmware.api [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Task: {'id': task-4233725, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.107616} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.644268] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 623.648096] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f1fc93-9e9f-44e4-a006-8885176a3746 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.651999] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5f05782-9f46-48f9-be73-d62a15453db9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.679237] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 5c7b2127-e875-4222-8148-a2ea60631c25/5c7b2127-e875-4222-8148-a2ea60631c25.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 623.681985] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0d6a07e-d948-4dc3-b6e7-3d70e97d1cf6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.700644] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a54b9e51-a79b-4a67-875c-9697857954b1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.717149] env[69367]: DEBUG nova.compute.provider_tree [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 
tempest-VolumesAssistedSnapshotsTest-171649522-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 623.720749] env[69367]: DEBUG oslo_vmware.api [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Waiting for the task: (returnval){ [ 623.720749] env[69367]: value = "task-4233728" [ 623.720749] env[69367]: _type = "Task" [ 623.720749] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.730137] env[69367]: DEBUG oslo_vmware.api [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Task: {'id': task-4233728, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.992547] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Task: {'id': task-4233726, 'name': Rename_Task, 'duration_secs': 0.35289} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.003670] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 624.004064] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e72ba417-d502-4456-bfa2-ac89492bf4ed {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.015181] env[69367]: DEBUG oslo_vmware.api [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233724, 'name': PowerOnVM_Task, 'duration_secs': 0.545187} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.016666] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 624.016891] env[69367]: INFO nova.compute.manager [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Took 9.53 seconds to spawn the instance on the hypervisor. 
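[editor's note] The spawn sequence recorded above (copy the cached image VMDK, extend the root disk, reconfigure the VM to attach it, rename, power on) is driven through oslo.vmware task polling: each "Invoking <Manager>.<Something>_Task" call returns a vSphere task reference that wait_for_task then polls, producing the recurring "Task: {'id': task-..., 'name': ...} progress is N%" lines. The following is a minimal sketch of that pattern using oslo.vmware's public session API; the vCenter host, credentials, and the 'vm-123' managed-object id are placeholders for illustration, not values taken from this log.

# Sketch of the oslo.vmware task-polling pattern seen in this log.
# Host, credentials and the 'vm-123' moref id below are placeholders.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc.example.org', 'user', 'password',
    api_retry_count=10, task_poll_interval=0.5)

# Build a managed object reference for an existing VM (hypothetical id).
vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

# Start an asynchronous vSphere task, e.g. powering the VM on ...
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# ... and block until it completes; wait_for_task is the poller that emits
# the progress/duration_secs entries visible throughout this log.
session.wait_for_task(task)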
[ 624.017081] env[69367]: DEBUG nova.compute.manager [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 624.017443] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Waiting for the task: (returnval){ [ 624.017443] env[69367]: value = "task-4233729" [ 624.017443] env[69367]: _type = "Task" [ 624.017443] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.018304] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89269e09-3d65-41f4-bfd6-4fed53b74083 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.034698] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Task: {'id': task-4233729, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.076642] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233727, 'name': Rename_Task} progress is 14%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.238400] env[69367]: DEBUG oslo_vmware.api [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Task: {'id': task-4233728, 'name': ReconfigVM_Task, 'duration_secs': 0.324941} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.238400] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 5c7b2127-e875-4222-8148-a2ea60631c25/5c7b2127-e875-4222-8148-a2ea60631c25.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 624.238709] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8b010203-7ffa-459d-85d6-b5f55177603b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.246619] env[69367]: DEBUG oslo_vmware.api [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Waiting for the task: (returnval){ [ 624.246619] env[69367]: value = "task-4233730" [ 624.246619] env[69367]: _type = "Task" [ 624.246619] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.252053] env[69367]: ERROR nova.scheduler.client.report [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 tempest-VolumesAssistedSnapshotsTest-171649522-project-member] [req-fb9850bb-0217-4c2b-bca2-a84e23c21ba1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-fb9850bb-0217-4c2b-bca2-a84e23c21ba1"}]} [ 624.252053] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 tempest-VolumesAssistedSnapshotsTest-171649522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.003s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 624.256092] env[69367]: ERROR nova.compute.manager [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 tempest-VolumesAssistedSnapshotsTest-171649522-project-member] [instance: 5341066e-fb7d-4951-935e-6188442981a5] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 624.256092] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] Traceback (most recent call last): [ 624.256092] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 624.256092] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] yield [ 624.256092] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 624.256092] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] self.set_inventory_for_provider( [ 624.256092] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 624.256092] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 624.257664] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-fb9850bb-0217-4c2b-bca2-a84e23c21ba1"}]} [ 624.257664] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] [ 624.257664] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] During handling of the above exception, another exception occurred: [ 624.257664] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] [ 624.257664] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] Traceback (most recent call last): [ 624.257664] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 624.257664] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] with self.rt.instance_claim(context, instance, node, allocs, [ 624.257664] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 624.257664] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] return f(*args, **kwargs) [ 624.258259] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 624.258259] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] self._update(elevated, cn) [ 624.258259] env[69367]: ERROR nova.compute.manager [instance: 
5341066e-fb7d-4951-935e-6188442981a5] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 624.258259] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] self._update_to_placement(context, compute_node, startup) [ 624.258259] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 624.258259] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 624.258259] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 624.258259] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] return attempt.get(self._wrap_exception) [ 624.258259] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 624.258259] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] six.reraise(self.value[0], self.value[1], self.value[2]) [ 624.258259] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 624.258259] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] raise value [ 624.258259] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 624.258582] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 624.258582] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 624.258582] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] self.reportclient.update_from_provider_tree( [ 624.258582] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 624.258582] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] with catch_all(pd.uuid): [ 624.258582] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 624.258582] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] self.gen.throw(typ, value, traceback) [ 624.258582] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 624.258582] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] raise exception.ResourceProviderSyncFailed() [ 624.258582] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 624.258582] env[69367]: ERROR nova.compute.manager [instance: 5341066e-fb7d-4951-935e-6188442981a5] [ 624.259212] env[69367]: DEBUG nova.compute.utils [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 tempest-VolumesAssistedSnapshotsTest-171649522-project-member] [instance: 5341066e-fb7d-4951-935e-6188442981a5] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 624.259355] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.301s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 624.261712] env[69367]: INFO nova.compute.claims [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 624.271263] env[69367]: DEBUG oslo_vmware.api [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Task: {'id': task-4233730, 'name': Rename_Task} progress is 14%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.272131] env[69367]: DEBUG nova.compute.manager [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 tempest-VolumesAssistedSnapshotsTest-171649522-project-member] [instance: 5341066e-fb7d-4951-935e-6188442981a5] Build of instance 5341066e-fb7d-4951-935e-6188442981a5 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 624.272493] env[69367]: DEBUG nova.compute.manager [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 tempest-VolumesAssistedSnapshotsTest-171649522-project-member] [instance: 5341066e-fb7d-4951-935e-6188442981a5] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 624.273074] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 tempest-VolumesAssistedSnapshotsTest-171649522-project-member] Acquiring lock "refresh_cache-5341066e-fb7d-4951-935e-6188442981a5" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.273074] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 tempest-VolumesAssistedSnapshotsTest-171649522-project-member] Acquired lock "refresh_cache-5341066e-fb7d-4951-935e-6188442981a5" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 624.273074] env[69367]: DEBUG nova.network.neutron [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 tempest-VolumesAssistedSnapshotsTest-171649522-project-member] [instance: 5341066e-fb7d-4951-935e-6188442981a5] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 624.279509] env[69367]: DEBUG nova.compute.manager [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Start spawning the instance on the hypervisor. 
{{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 624.323605] env[69367]: DEBUG nova.virt.hardware [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 624.324354] env[69367]: DEBUG nova.virt.hardware [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 624.324354] env[69367]: DEBUG nova.virt.hardware [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 624.324354] env[69367]: DEBUG nova.virt.hardware [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 624.324530] env[69367]: DEBUG nova.virt.hardware [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 624.324983] env[69367]: DEBUG nova.virt.hardware [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 624.324983] env[69367]: DEBUG nova.virt.hardware [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 624.324983] env[69367]: DEBUG nova.virt.hardware [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 624.325129] env[69367]: DEBUG nova.virt.hardware [None 
req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 624.325294] env[69367]: DEBUG nova.virt.hardware [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 624.325473] env[69367]: DEBUG nova.virt.hardware [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 624.327423] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba3e732-828e-4151-ab39-e3f2904581a1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.338446] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e56af06-9e3b-42bf-8878-ec0aa3c50be2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.534406] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Task: {'id': task-4233729, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.553672] env[69367]: INFO nova.compute.manager [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Took 19.70 seconds to build instance. [ 624.575245] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233727, 'name': Rename_Task, 'duration_secs': 0.845787} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.576637] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 624.576918] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a4718f75-89d5-4e4c-934b-2701d0f4c06f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.586354] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Waiting for the task: (returnval){ [ 624.586354] env[69367]: value = "task-4233731" [ 624.586354] env[69367]: _type = "Task" [ 624.586354] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.597661] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233731, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.763446] env[69367]: DEBUG oslo_vmware.api [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Task: {'id': task-4233730, 'name': Rename_Task, 'duration_secs': 0.160373} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.763698] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 624.763993] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8eab0072-b239-4d4a-8b7a-19e20a7767ae {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.772042] env[69367]: DEBUG oslo_vmware.api [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Waiting for the task: (returnval){ [ 624.772042] env[69367]: value = "task-4233732" [ 624.772042] env[69367]: _type = "Task" [ 624.772042] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.797146] env[69367]: DEBUG oslo_vmware.api [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Task: {'id': task-4233732, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.848703] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Acquiring lock "db11f64c-0881-4a06-ba8d-6f52ec7fab16" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 624.849989] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Lock "db11f64c-0881-4a06-ba8d-6f52ec7fab16" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 624.890945] env[69367]: DEBUG nova.network.neutron [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Successfully updated port: 013efad5-0b57-43e9-b662-10e31d24d8af {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 624.919707] env[69367]: DEBUG nova.network.neutron [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 tempest-VolumesAssistedSnapshotsTest-171649522-project-member] [instance: 5341066e-fb7d-4951-935e-6188442981a5] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 624.944938] env[69367]: DEBUG nova.network.neutron [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Successfully created port: a7fe18e3-9f20-481e-b223-1b2907709041 {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 625.040990] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Task: {'id': task-4233729, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.056252] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9af23c13-6fa5-412e-bfe7-3abb63184363 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Lock "a358ce6d-9826-4ddb-8c2f-51bac8db59d4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.214s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 625.103498] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233731, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.292043] env[69367]: DEBUG oslo_vmware.api [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Task: {'id': task-4233732, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.306481] env[69367]: DEBUG nova.scheduler.client.report [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 625.335867] env[69367]: DEBUG nova.scheduler.client.report [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 625.335867] env[69367]: DEBUG nova.compute.provider_tree [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 625.354904] env[69367]: DEBUG nova.scheduler.client.report [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 625.394855] env[69367]: DEBUG nova.scheduler.client.report [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 625.400067] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 
tempest-SecurityGroupsTestJSON-322828888-project-member] Acquiring lock "refresh_cache-e1c7d100-4ad7-4871-970f-bb7562bfc6fc" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.400446] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquired lock "refresh_cache-e1c7d100-4ad7-4871-970f-bb7562bfc6fc" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 625.408317] env[69367]: DEBUG nova.network.neutron [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 625.482088] env[69367]: DEBUG nova.network.neutron [req-564381c2-259f-4051-b674-0c2e3d42b359 req-928c76ce-6fec-4af1-8bd6-c5fa8547e3a9 service nova] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Updated VIF entry in instance network info cache for port c1cd0433-e331-4e76-af42-c5cd0421b041. {{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 625.482088] env[69367]: DEBUG nova.network.neutron [req-564381c2-259f-4051-b674-0c2e3d42b359 req-928c76ce-6fec-4af1-8bd6-c5fa8547e3a9 service nova] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Updating instance_info_cache with network_info: [{"id": "c1cd0433-e331-4e76-af42-c5cd0421b041", "address": "fa:16:3e:b1:48:3d", "network": {"id": "62ab7c93-8b0d-49ed-aa82-3bc315e190df", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.26", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cd7c200d5cd6461fb951580f8c764c42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1cd0433-e3", "ovs_interfaceid": "c1cd0433-e331-4e76-af42-c5cd0421b041", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.504853] env[69367]: DEBUG nova.network.neutron [req-0d179373-235b-4382-9909-dc54845809f3 req-ef12c1cd-662d-4e7a-a27c-113a707e8b48 service nova] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Updated VIF entry in instance network info cache for port 4051893d-10cc-4cb9-8e30-089e8d3d4286. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 625.505228] env[69367]: DEBUG nova.network.neutron [req-0d179373-235b-4382-9909-dc54845809f3 req-ef12c1cd-662d-4e7a-a27c-113a707e8b48 service nova] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Updating instance_info_cache with network_info: [{"id": "4051893d-10cc-4cb9-8e30-089e8d3d4286", "address": "fa:16:3e:a8:2e:46", "network": {"id": "62ab7c93-8b0d-49ed-aa82-3bc315e190df", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.214", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cd7c200d5cd6461fb951580f8c764c42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4051893d-10", "ovs_interfaceid": "4051893d-10cc-4cb9-8e30-089e8d3d4286", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.513491] env[69367]: DEBUG nova.network.neutron [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 tempest-VolumesAssistedSnapshotsTest-171649522-project-member] [instance: 5341066e-fb7d-4951-935e-6188442981a5] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.543419] env[69367]: DEBUG oslo_vmware.api [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Task: {'id': task-4233729, 'name': PowerOnVM_Task, 'duration_secs': 1.146963} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.543419] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 625.543419] env[69367]: INFO nova.compute.manager [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Took 15.71 seconds to spawn the instance on the hypervisor. 
[ 625.543725] env[69367]: DEBUG nova.compute.manager [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 625.547259] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd42151e-9abf-452b-9500-fce20913b5f0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.563775] env[69367]: DEBUG nova.compute.manager [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] [instance: 07a65426-e348-4f6f-8898-45409e15c554] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 625.606791] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233731, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.743422] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd75944d-275e-4d2e-bb92-ef2f9f2db617 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.752039] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3acba306-e8ca-46ee-9f89-ff8d3eb2e1e3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.792593] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-decd736f-c9b0-439a-b430-535ef8577935 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.807027] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4737b9b5-1765-4e47-a75e-d88667c61149 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.808902] env[69367]: DEBUG oslo_vmware.api [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Task: {'id': task-4233732, 'name': PowerOnVM_Task, 'duration_secs': 0.700659} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.809378] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 625.809492] env[69367]: INFO nova.compute.manager [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Took 13.59 seconds to spawn the instance on the hypervisor. 
[ 625.809682] env[69367]: DEBUG nova.compute.manager [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 625.811536] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c60d002f-6c3e-43a9-b5e8-ab187532ad46 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.824787] env[69367]: DEBUG nova.compute.provider_tree [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 625.985087] env[69367]: DEBUG oslo_concurrency.lockutils [req-564381c2-259f-4051-b674-0c2e3d42b359 req-928c76ce-6fec-4af1-8bd6-c5fa8547e3a9 service nova] Releasing lock "refresh_cache-1302cad6-55b7-4905-92c1-dfdd37042e30" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 626.011248] env[69367]: DEBUG oslo_concurrency.lockutils [req-0d179373-235b-4382-9909-dc54845809f3 req-ef12c1cd-662d-4e7a-a27c-113a707e8b48 service nova] Releasing lock "refresh_cache-5c7b2127-e875-4222-8148-a2ea60631c25" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 626.020320] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 tempest-VolumesAssistedSnapshotsTest-171649522-project-member] Releasing lock "refresh_cache-5341066e-fb7d-4951-935e-6188442981a5" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 626.020462] env[69367]: DEBUG nova.compute.manager [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 tempest-VolumesAssistedSnapshotsTest-171649522-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 626.020901] env[69367]: DEBUG nova.compute.manager [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 tempest-VolumesAssistedSnapshotsTest-171649522-project-member] [instance: 5341066e-fb7d-4951-935e-6188442981a5] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 626.020901] env[69367]: DEBUG nova.network.neutron [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 tempest-VolumesAssistedSnapshotsTest-171649522-project-member] [instance: 5341066e-fb7d-4951-935e-6188442981a5] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 626.093942] env[69367]: INFO nova.compute.manager [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Took 22.71 seconds to build instance. [ 626.107711] env[69367]: DEBUG oslo_vmware.api [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233731, 'name': PowerOnVM_Task, 'duration_secs': 1.270613} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.107983] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 626.108213] env[69367]: INFO nova.compute.manager [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Took 9.11 seconds to spawn the instance on the hypervisor. [ 626.108761] env[69367]: DEBUG nova.compute.manager [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 626.109930] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a22e6f4e-06d4-49ce-bc46-d44568aafc02 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.122073] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 626.150026] env[69367]: DEBUG nova.network.neutron [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 tempest-VolumesAssistedSnapshotsTest-171649522-project-member] [instance: 5341066e-fb7d-4951-935e-6188442981a5] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 626.227579] env[69367]: DEBUG nova.network.neutron [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 626.291548] env[69367]: DEBUG oslo_concurrency.lockutils [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Acquiring lock "83fb3858-0c21-42f1-a815-f007bcdb8561" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 626.291887] env[69367]: DEBUG oslo_concurrency.lockutils [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Lock "83fb3858-0c21-42f1-a815-f007bcdb8561" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 626.355074] env[69367]: INFO nova.compute.manager [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Took 22.59 seconds to build instance. [ 626.365046] env[69367]: ERROR nova.scheduler.client.report [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] [req-0317b2ca-733b-43bf-a02f-669c82fcbcec] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-0317b2ca-733b-43bf-a02f-669c82fcbcec"}]} [ 626.365310] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.106s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 626.365986] env[69367]: ERROR nova.compute.manager [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 626.365986] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] Traceback (most recent call last): [ 626.365986] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 626.365986] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] yield [ 626.365986] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 626.365986] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] self.set_inventory_for_provider( [ 626.365986] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 626.365986] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 626.366288] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-0317b2ca-733b-43bf-a02f-669c82fcbcec"}]} [ 626.366288] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] [ 626.366288] env[69367]: ERROR 
nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] During handling of the above exception, another exception occurred: [ 626.366288] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] [ 626.366288] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] Traceback (most recent call last): [ 626.366288] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 626.366288] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] with self.rt.instance_claim(context, instance, node, allocs, [ 626.366288] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 626.366288] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] return f(*args, **kwargs) [ 626.366607] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 626.366607] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] self._update(elevated, cn) [ 626.366607] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 626.366607] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] self._update_to_placement(context, compute_node, startup) [ 626.366607] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 626.366607] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 626.366607] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 626.366607] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] return attempt.get(self._wrap_exception) [ 626.366607] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 626.366607] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] six.reraise(self.value[0], self.value[1], self.value[2]) [ 626.366607] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 626.366607] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] raise value [ 626.366607] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 626.366958] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 626.366958] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] File 
"/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 626.366958] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] self.reportclient.update_from_provider_tree( [ 626.366958] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 626.366958] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] with catch_all(pd.uuid): [ 626.366958] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 626.366958] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] self.gen.throw(typ, value, traceback) [ 626.366958] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 626.366958] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] raise exception.ResourceProviderSyncFailed() [ 626.366958] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 626.366958] env[69367]: ERROR nova.compute.manager [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] [ 626.367276] env[69367]: DEBUG nova.compute.utils [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 626.369144] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 8.817s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 626.370999] env[69367]: DEBUG nova.compute.manager [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] Build of instance 7a13d45a-1941-4caf-a510-34b11d78b5e7 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 626.371657] env[69367]: DEBUG nova.compute.manager [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 626.371749] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] Acquiring lock "refresh_cache-7a13d45a-1941-4caf-a510-34b11d78b5e7" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.371959] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] Acquired lock "refresh_cache-7a13d45a-1941-4caf-a510-34b11d78b5e7" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 626.373523] env[69367]: DEBUG nova.network.neutron [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 626.601058] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e5c55306-db19-47a1-ba30-976ca079999c tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Lock "1302cad6-55b7-4905-92c1-dfdd37042e30" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.231s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 626.633134] env[69367]: INFO nova.compute.manager [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Took 21.01 seconds to build instance. 
[ 626.653198] env[69367]: DEBUG nova.network.neutron [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 tempest-VolumesAssistedSnapshotsTest-171649522-project-member] [instance: 5341066e-fb7d-4951-935e-6188442981a5] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.795017] env[69367]: DEBUG nova.network.neutron [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Updating instance_info_cache with network_info: [{"id": "013efad5-0b57-43e9-b662-10e31d24d8af", "address": "fa:16:3e:1a:c6:8e", "network": {"id": "dd68ce65-5682-4b4c-913c-cf699d2146be", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-341319856-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2375d6603eef45069be4a3541519002a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2c019b6-3ef3-4c8f-95bd-edede2c554a9", "external-id": "nsx-vlan-transportzone-364", "segmentation_id": 364, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap013efad5-0b", "ovs_interfaceid": "013efad5-0b57-43e9-b662-10e31d24d8af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.858300] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3e0be0f0-b551-4bea-8ef9-7c8940c10892 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Lock "5c7b2127-e875-4222-8148-a2ea60631c25" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.107s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 626.941594] env[69367]: DEBUG nova.network.neutron [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 627.111146] env[69367]: DEBUG nova.compute.manager [req-c0e67d90-9d6e-46da-981a-7fb10b459b07 req-d5552699-8e83-4e30-b1e4-fb218957328a service nova] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Received event network-vif-plugged-013efad5-0b57-43e9-b662-10e31d24d8af {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 627.111146] env[69367]: DEBUG oslo_concurrency.lockutils [req-c0e67d90-9d6e-46da-981a-7fb10b459b07 req-d5552699-8e83-4e30-b1e4-fb218957328a service nova] Acquiring lock "e1c7d100-4ad7-4871-970f-bb7562bfc6fc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 627.111146] env[69367]: DEBUG oslo_concurrency.lockutils [req-c0e67d90-9d6e-46da-981a-7fb10b459b07 req-d5552699-8e83-4e30-b1e4-fb218957328a service nova] Lock "e1c7d100-4ad7-4871-970f-bb7562bfc6fc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 627.111146] env[69367]: DEBUG oslo_concurrency.lockutils [req-c0e67d90-9d6e-46da-981a-7fb10b459b07 req-d5552699-8e83-4e30-b1e4-fb218957328a service nova] Lock "e1c7d100-4ad7-4871-970f-bb7562bfc6fc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 627.111146] env[69367]: DEBUG nova.compute.manager [req-c0e67d90-9d6e-46da-981a-7fb10b459b07 req-d5552699-8e83-4e30-b1e4-fb218957328a service nova] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] No waiting events found dispatching network-vif-plugged-013efad5-0b57-43e9-b662-10e31d24d8af {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 627.113073] env[69367]: WARNING nova.compute.manager [req-c0e67d90-9d6e-46da-981a-7fb10b459b07 req-d5552699-8e83-4e30-b1e4-fb218957328a service nova] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Received unexpected event network-vif-plugged-013efad5-0b57-43e9-b662-10e31d24d8af for instance with vm_state building and task_state spawning. [ 627.113073] env[69367]: DEBUG nova.compute.manager [req-c0e67d90-9d6e-46da-981a-7fb10b459b07 req-d5552699-8e83-4e30-b1e4-fb218957328a service nova] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Received event network-changed-013efad5-0b57-43e9-b662-10e31d24d8af {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 627.113073] env[69367]: DEBUG nova.compute.manager [req-c0e67d90-9d6e-46da-981a-7fb10b459b07 req-d5552699-8e83-4e30-b1e4-fb218957328a service nova] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Refreshing instance network info cache due to event network-changed-013efad5-0b57-43e9-b662-10e31d24d8af. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 627.113073] env[69367]: DEBUG oslo_concurrency.lockutils [req-c0e67d90-9d6e-46da-981a-7fb10b459b07 req-d5552699-8e83-4e30-b1e4-fb218957328a service nova] Acquiring lock "refresh_cache-e1c7d100-4ad7-4871-970f-bb7562bfc6fc" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.114549] env[69367]: DEBUG nova.compute.manager [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] [instance: 1df0055c-938e-4048-938c-37590b0138ac] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 627.136492] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9d00f5f-32f3-462b-a249-9f9b1a74f312 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Lock "4e346ed1-36e9-421d-975f-e8bb6f05c0a0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.525s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 627.157826] env[69367]: INFO nova.compute.manager [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 tempest-VolumesAssistedSnapshotsTest-171649522-project-member] [instance: 5341066e-fb7d-4951-935e-6188442981a5] Took 1.14 seconds to deallocate network for instance. [ 627.217646] env[69367]: DEBUG nova.network.neutron [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.298301] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Releasing lock "refresh_cache-e1c7d100-4ad7-4871-970f-bb7562bfc6fc" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 627.300917] env[69367]: DEBUG nova.compute.manager [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Instance network_info: |[{"id": "013efad5-0b57-43e9-b662-10e31d24d8af", "address": "fa:16:3e:1a:c6:8e", "network": {"id": "dd68ce65-5682-4b4c-913c-cf699d2146be", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-341319856-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2375d6603eef45069be4a3541519002a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2c019b6-3ef3-4c8f-95bd-edede2c554a9", "external-id": "nsx-vlan-transportzone-364", "segmentation_id": 364, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap013efad5-0b", "ovs_interfaceid": "013efad5-0b57-43e9-b662-10e31d24d8af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 627.300917] env[69367]: DEBUG oslo_concurrency.lockutils [req-c0e67d90-9d6e-46da-981a-7fb10b459b07 req-d5552699-8e83-4e30-b1e4-fb218957328a service nova] Acquired lock "refresh_cache-e1c7d100-4ad7-4871-970f-bb7562bfc6fc" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.301298] env[69367]: DEBUG nova.network.neutron [req-c0e67d90-9d6e-46da-981a-7fb10b459b07 req-d5552699-8e83-4e30-b1e4-fb218957328a service nova] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Refreshing network info cache for port 013efad5-0b57-43e9-b662-10e31d24d8af {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 627.301298] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:c6:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b2c019b6-3ef3-4c8f-95bd-edede2c554a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '013efad5-0b57-43e9-b662-10e31d24d8af', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 627.318609] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Creating folder: Project (2375d6603eef45069be4a3541519002a). Parent ref: group-v837645. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 627.318609] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3aa7333b-911c-49b6-a1e1-353b9dad626d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.329411] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Created folder: Project (2375d6603eef45069be4a3541519002a) in parent group-v837645. [ 627.329411] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Creating folder: Instances. Parent ref: group-v837665. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 627.329411] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e643fe8e-6cac-46a8-8f10-cce3d19e9891 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.343262] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Created folder: Instances in parent group-v837665. 
[ 627.344027] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 627.344027] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 627.344027] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e13d828-7f83-4ec0-b0de-2556de7c1bc9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.363047] env[69367]: DEBUG nova.compute.manager [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 627.371139] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 627.371139] env[69367]: value = "task-4233735" [ 627.371139] env[69367]: _type = "Task" [ 627.371139] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.384019] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233735, 'name': CreateVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.422502] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 627.423052] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 1302cad6-55b7-4905-92c1-dfdd37042e30 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 627.423052] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 5c7b2127-e875-4222-8148-a2ea60631c25 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 627.423052] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance a358ce6d-9826-4ddb-8c2f-51bac8db59d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 627.423052] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 4e346ed1-36e9-421d-975f-e8bb6f05c0a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 627.423235] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance e1c7d100-4ad7-4871-970f-bb7562bfc6fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 627.423235] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 627.423543] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 627.423543] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 5341066e-fb7d-4951-935e-6188442981a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 627.423662] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 7a13d45a-1941-4caf-a510-34b11d78b5e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 627.639066] env[69367]: DEBUG nova.compute.manager [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 627.643807] env[69367]: DEBUG oslo_concurrency.lockutils [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 627.719634] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] Releasing lock "refresh_cache-7a13d45a-1941-4caf-a510-34b11d78b5e7" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 627.719634] env[69367]: DEBUG nova.compute.manager [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 627.719634] env[69367]: DEBUG nova.compute.manager [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 627.719845] env[69367]: DEBUG nova.network.neutron [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 627.835019] env[69367]: DEBUG nova.network.neutron [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 627.892713] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233735, 'name': CreateVM_Task, 'duration_secs': 0.453745} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.892936] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 627.895410] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.895410] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.895410] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 627.895410] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c25f8bf2-0c7f-4afd-a718-3901f38d6abc {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.902988] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Waiting for the task: (returnval){ [ 627.902988] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52fd4153-6b64-dd2b-af55-7d608204744c" [ 627.902988] env[69367]: _type = "Task" [ 627.902988] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.908468] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 627.915104] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52fd4153-6b64-dd2b-af55-7d608204744c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.929539] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance f11c0d77-b53c-4d96-820d-bd3ff3a08955 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 628.015143] env[69367]: DEBUG nova.network.neutron [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Successfully updated port: b6a0688d-a5a2-4937-9ac7-25b53f9b001d {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 628.185642] env[69367]: DEBUG oslo_concurrency.lockutils [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 628.185642] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Acquiring lock "5994e782-02fc-47a9-81f8-aa4b6d9fec4b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 628.185642] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Lock "5994e782-02fc-47a9-81f8-aa4b6d9fec4b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 628.194731] env[69367]: INFO nova.scheduler.client.report [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 tempest-VolumesAssistedSnapshotsTest-171649522-project-member] Deleted allocations for instance 5341066e-fb7d-4951-935e-6188442981a5 [ 628.338368] env[69367]: DEBUG nova.network.neutron [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.414638] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52fd4153-6b64-dd2b-af55-7d608204744c, 'name': SearchDatastore_Task, 'duration_secs': 0.0119} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.415132] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.415268] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 628.415507] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.416345] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 628.416345] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 628.416345] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-807528c9-a835-4b79-985f-0c0e620feb68 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.429884] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 628.430163] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 628.432450] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 07a65426-e348-4f6f-8898-45409e15c554 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 628.433409] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1958083e-2b31-4ab2-9093-d5d0bd00fcd5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.440932] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Waiting for the task: (returnval){ [ 628.440932] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52823192-a822-40e6-dcbf-b522da1f3a99" [ 628.440932] env[69367]: _type = "Task" [ 628.440932] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.450265] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52823192-a822-40e6-dcbf-b522da1f3a99, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.521517] env[69367]: DEBUG oslo_concurrency.lockutils [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Acquiring lock "refresh_cache-3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.521720] env[69367]: DEBUG oslo_concurrency.lockutils [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Acquired lock "refresh_cache-3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 628.521999] env[69367]: DEBUG nova.network.neutron [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 628.648425] env[69367]: DEBUG nova.network.neutron [req-c0e67d90-9d6e-46da-981a-7fb10b459b07 req-d5552699-8e83-4e30-b1e4-fb218957328a service nova] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Updated VIF entry in instance network info cache for port 013efad5-0b57-43e9-b662-10e31d24d8af. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 628.649607] env[69367]: DEBUG nova.network.neutron [req-c0e67d90-9d6e-46da-981a-7fb10b459b07 req-d5552699-8e83-4e30-b1e4-fb218957328a service nova] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Updating instance_info_cache with network_info: [{"id": "013efad5-0b57-43e9-b662-10e31d24d8af", "address": "fa:16:3e:1a:c6:8e", "network": {"id": "dd68ce65-5682-4b4c-913c-cf699d2146be", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-341319856-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2375d6603eef45069be4a3541519002a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2c019b6-3ef3-4c8f-95bd-edede2c554a9", "external-id": "nsx-vlan-transportzone-364", "segmentation_id": 364, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap013efad5-0b", "ovs_interfaceid": "013efad5-0b57-43e9-b662-10e31d24d8af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.705038] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e2f71031-15ee-4076-80dd-7c13cedc35df tempest-VolumesAssistedSnapshotsTest-171649522 tempest-VolumesAssistedSnapshotsTest-171649522-project-member] Lock "5341066e-fb7d-4951-935e-6188442981a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.882s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 628.722644] env[69367]: DEBUG nova.compute.manager [None req-a9e074d4-0a69-454a-b66f-e71b40fc694f tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 628.723930] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a0ad113-adf0-4418-a91f-17f6aab55bf8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.844927] env[69367]: INFO nova.compute.manager [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] [instance: 7a13d45a-1941-4caf-a510-34b11d78b5e7] Took 1.12 seconds to deallocate network for instance. [ 628.937994] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 1df0055c-938e-4048-938c-37590b0138ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 628.954422] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52823192-a822-40e6-dcbf-b522da1f3a99, 'name': SearchDatastore_Task, 'duration_secs': 0.012395} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.955366] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b928541b-11f6-4e0b-90c0-43959cc25e32 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.964448] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Waiting for the task: (returnval){ [ 628.964448] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]520160b1-10d6-a30b-73c4-44f063a10efd" [ 628.964448] env[69367]: _type = "Task" [ 628.964448] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.981444] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]520160b1-10d6-a30b-73c4-44f063a10efd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.085985] env[69367]: DEBUG nova.network.neutron [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Successfully updated port: a7fe18e3-9f20-481e-b223-1b2907709041 {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 629.097686] env[69367]: DEBUG nova.network.neutron [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 629.153023] env[69367]: DEBUG oslo_concurrency.lockutils [req-c0e67d90-9d6e-46da-981a-7fb10b459b07 req-d5552699-8e83-4e30-b1e4-fb218957328a service nova] Releasing lock "refresh_cache-e1c7d100-4ad7-4871-970f-bb7562bfc6fc" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 629.217237] env[69367]: DEBUG nova.compute.manager [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 629.236321] env[69367]: INFO nova.compute.manager [None req-a9e074d4-0a69-454a-b66f-e71b40fc694f tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] instance snapshotting [ 629.237446] env[69367]: DEBUG nova.objects.instance [None req-a9e074d4-0a69-454a-b66f-e71b40fc694f tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Lazy-loading 'flavor' on Instance uuid a358ce6d-9826-4ddb-8c2f-51bac8db59d4 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 629.349397] env[69367]: DEBUG nova.network.neutron [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Updating instance_info_cache with network_info: [{"id": "b6a0688d-a5a2-4937-9ac7-25b53f9b001d", "address": "fa:16:3e:a7:29:36", "network": {"id": "346c9326-4d8c-4f7c-b346-ea12f5dd891e", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1718591398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "06c46b0af1af4a788c5e7159fc2daa3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6a0688d-a5", "ovs_interfaceid": "b6a0688d-a5a2-4937-9ac7-25b53f9b001d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.444866] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance db11f64c-0881-4a06-ba8d-6f52ec7fab16 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 629.482517] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]520160b1-10d6-a30b-73c4-44f063a10efd, 'name': SearchDatastore_Task, 'duration_secs': 0.015388} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.483160] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 629.483883] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] e1c7d100-4ad7-4871-970f-bb7562bfc6fc/e1c7d100-4ad7-4871-970f-bb7562bfc6fc.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 629.483883] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a2841792-38e2-4287-879f-06de792ac30b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.497980] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Waiting for the task: (returnval){ [ 629.497980] env[69367]: value = "task-4233736" [ 629.497980] env[69367]: _type = "Task" [ 629.497980] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.506109] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233736, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.590861] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Acquiring lock "refresh_cache-92bdb1b1-d8ab-46b2-9037-ee8fea4642ce" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.592146] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Acquired lock "refresh_cache-92bdb1b1-d8ab-46b2-9037-ee8fea4642ce" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 629.592146] env[69367]: DEBUG nova.network.neutron [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 629.749132] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ddbc2c-6276-443c-896c-858b731ebbcb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.755528] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 629.777037] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01515a21-7698-4414-aa37-331519c52964 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.837095] env[69367]: DEBUG oslo_concurrency.lockutils [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] Acquiring lock "4bef75af-bbe8-4c6e-8c06-9c827ece1134" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 629.837868] env[69367]: DEBUG oslo_concurrency.lockutils [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] Lock "4bef75af-bbe8-4c6e-8c06-9c827ece1134" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 629.857842] env[69367]: DEBUG oslo_concurrency.lockutils [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Releasing lock "refresh_cache-3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 
629.859465] env[69367]: DEBUG nova.compute.manager [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Instance network_info: |[{"id": "b6a0688d-a5a2-4937-9ac7-25b53f9b001d", "address": "fa:16:3e:a7:29:36", "network": {"id": "346c9326-4d8c-4f7c-b346-ea12f5dd891e", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1718591398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "06c46b0af1af4a788c5e7159fc2daa3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6a0688d-a5", "ovs_interfaceid": "b6a0688d-a5a2-4937-9ac7-25b53f9b001d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 629.863717] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a7:29:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f0ef5aba-bd9a-42ff-a1a0-5e763986d70a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b6a0688d-a5a2-4937-9ac7-25b53f9b001d', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 629.873170] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Creating folder: Project (06c46b0af1af4a788c5e7159fc2daa3d). Parent ref: group-v837645. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 629.875595] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5b71a331-cbc7-4afa-a105-414fd376162c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.890877] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Created folder: Project (06c46b0af1af4a788c5e7159fc2daa3d) in parent group-v837645. [ 629.890877] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Creating folder: Instances. Parent ref: group-v837668. 
{{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 629.895030] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-11f99b2c-08ea-4657-a20b-0d223731e774 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.900217] env[69367]: INFO nova.scheduler.client.report [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] Deleted allocations for instance 7a13d45a-1941-4caf-a510-34b11d78b5e7 [ 629.918656] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Created folder: Instances in parent group-v837668. [ 629.918942] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 629.919219] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 629.919468] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-536a0314-3850-45d8-b4ce-ac5ff060039d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.946805] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 83fb3858-0c21-42f1-a815-f007bcdb8561 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 629.947129] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 629.947286] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 629.953364] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 629.953364] env[69367]: value = "task-4233739" [ 629.953364] env[69367]: _type = "Task" [ 629.953364] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.968380] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233739, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.983602] env[69367]: DEBUG nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 630.009627] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233736, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.010550] env[69367]: DEBUG nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 630.010804] env[69367]: DEBUG nova.compute.provider_tree [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 630.029697] env[69367]: DEBUG nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: a14b5a5f-d0f8-48c5-a513-99fc42773a7e {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 630.059886] env[69367]: DEBUG nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 630.168853] env[69367]: DEBUG nova.network.neutron [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 630.293162] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e074d4-0a69-454a-b66f-e71b40fc694f tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Creating Snapshot of the VM instance {{(pid=69367) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 630.296344] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b2f64ffe-d19b-40d2-8996-055a82700a03 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.306382] env[69367]: DEBUG oslo_vmware.api [None req-a9e074d4-0a69-454a-b66f-e71b40fc694f tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Waiting for the task: (returnval){ [ 630.306382] env[69367]: value = "task-4233740" [ 630.306382] env[69367]: _type = "Task" [ 630.306382] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.325266] env[69367]: DEBUG oslo_vmware.api [None req-a9e074d4-0a69-454a-b66f-e71b40fc694f tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233740, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.346445] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Acquiring lock "5c7b2127-e875-4222-8148-a2ea60631c25" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.346445] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Lock "5c7b2127-e875-4222-8148-a2ea60631c25" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 630.346445] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Acquiring lock "5c7b2127-e875-4222-8148-a2ea60631c25-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.346445] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Lock "5c7b2127-e875-4222-8148-a2ea60631c25-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 630.346803] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 
tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Lock "5c7b2127-e875-4222-8148-a2ea60631c25-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 630.353190] env[69367]: INFO nova.compute.manager [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Terminating instance [ 630.410926] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5ef0996f-0500-429e-97b5-259cde98018c tempest-FloatingIPsAssociationTestJSON-1476451203 tempest-FloatingIPsAssociationTestJSON-1476451203-project-member] Lock "7a13d45a-1941-4caf-a510-34b11d78b5e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.486s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 630.466736] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233739, 'name': CreateVM_Task, 'duration_secs': 0.477336} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.469845] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 630.470963] env[69367]: DEBUG oslo_concurrency.lockutils [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.471185] env[69367]: DEBUG oslo_concurrency.lockutils [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 630.471628] env[69367]: DEBUG oslo_concurrency.lockutils [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 630.471746] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2ddf5d9-63ee-433a-ab4e-293e0bcfbbb2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.477998] env[69367]: DEBUG oslo_vmware.api [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Waiting for the task: (returnval){ [ 630.477998] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]5240aed2-f82e-229d-d1b1-8597bb2cd41e" [ 630.477998] env[69367]: _type = "Task" [ 630.477998] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.491112] env[69367]: DEBUG oslo_vmware.api [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5240aed2-f82e-229d-d1b1-8597bb2cd41e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.510899] env[69367]: DEBUG nova.network.neutron [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Updating instance_info_cache with network_info: [{"id": "a7fe18e3-9f20-481e-b223-1b2907709041", "address": "fa:16:3e:23:f7:a6", "network": {"id": "afabf187-1494-4413-aabe-98c264714fc7", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-2057114382-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4840471cf7844a1aac397d7ee7db12d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d377d75-3add-4a15-8691-74b2eb010924", "external-id": "nsx-vlan-transportzone-71", "segmentation_id": 71, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7fe18e3-9f", "ovs_interfaceid": "a7fe18e3-9f20-481e-b223-1b2907709041", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.518364] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233736, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.616457} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.518994] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] e1c7d100-4ad7-4871-970f-bb7562bfc6fc/e1c7d100-4ad7-4871-970f-bb7562bfc6fc.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 630.519293] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 630.519632] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b86ed2ce-3999-4f42-8864-74916438cf9e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.537689] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Waiting for the task: (returnval){ [ 630.537689] env[69367]: value = "task-4233741" [ 630.537689] env[69367]: _type = "Task" [ 630.537689] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.548269] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233741, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.549537] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-077d3b3b-726a-4b0e-98e7-045bce5bf799 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.558732] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8725691d-b556-4461-873a-121cabf9d7aa {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.596999] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd6fa008-bdb7-4b3c-9ff4-35e4b1251e1e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.606834] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a55b08c4-bf8b-423e-b739-74614aa81117 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.624119] env[69367]: DEBUG nova.compute.provider_tree [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 630.769498] env[69367]: DEBUG nova.compute.manager [None req-ed5fd3dc-98e9-4dcf-a7f0-6075d7b44b6d tempest-ServerDiagnosticsTest-1566212184 tempest-ServerDiagnosticsTest-1566212184-project-admin] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 630.772667] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3774d739-1486-4344-9d04-1fd86aa1c769 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.782335] env[69367]: INFO nova.compute.manager [None req-ed5fd3dc-98e9-4dcf-a7f0-6075d7b44b6d tempest-ServerDiagnosticsTest-1566212184 tempest-ServerDiagnosticsTest-1566212184-project-admin] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Retrieving diagnostics [ 630.783180] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d4839c2-9e77-434b-a2a4-1642b265f79b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.829246] env[69367]: DEBUG oslo_concurrency.lockutils [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquiring lock "8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.829482] env[69367]: DEBUG oslo_concurrency.lockutils [None 
req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 630.836240] env[69367]: DEBUG oslo_vmware.api [None req-a9e074d4-0a69-454a-b66f-e71b40fc694f tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233740, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.858642] env[69367]: DEBUG nova.compute.manager [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Start destroying the instance on the hypervisor. {{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 630.859688] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 630.860015] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cbc945b-fda7-47cb-b56f-fd08725d9a1f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.869073] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 630.869398] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cd0dc5e1-6d35-452a-a602-b7f59ae49cf3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.878610] env[69367]: DEBUG oslo_vmware.api [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Waiting for the task: (returnval){ [ 630.878610] env[69367]: value = "task-4233742" [ 630.878610] env[69367]: _type = "Task" [ 630.878610] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.889483] env[69367]: DEBUG oslo_vmware.api [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Task: {'id': task-4233742, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.918029] env[69367]: DEBUG nova.compute.manager [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 630.993749] env[69367]: DEBUG oslo_vmware.api [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5240aed2-f82e-229d-d1b1-8597bb2cd41e, 'name': SearchDatastore_Task, 'duration_secs': 0.011437} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.995792] env[69367]: DEBUG oslo_concurrency.lockutils [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 630.996523] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 630.996642] env[69367]: DEBUG oslo_concurrency.lockutils [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.997139] env[69367]: DEBUG oslo_concurrency.lockutils [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 630.997139] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 630.997402] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a1d1aa97-8831-4685-9e0a-01653b23ec31 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.008756] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 631.009254] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 631.009940] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d5d3115-b84e-44c7-a04e-5f8a6ecc86a0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.019905] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Releasing lock "refresh_cache-92bdb1b1-d8ab-46b2-9037-ee8fea4642ce" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 631.020111] env[69367]: DEBUG nova.compute.manager [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Instance network_info: |[{"id": "a7fe18e3-9f20-481e-b223-1b2907709041", "address": "fa:16:3e:23:f7:a6", "network": {"id": "afabf187-1494-4413-aabe-98c264714fc7", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-2057114382-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4840471cf7844a1aac397d7ee7db12d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d377d75-3add-4a15-8691-74b2eb010924", "external-id": "nsx-vlan-transportzone-71", "segmentation_id": 71, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7fe18e3-9f", "ovs_interfaceid": "a7fe18e3-9f20-481e-b223-1b2907709041", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 631.024060] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:f7:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7d377d75-3add-4a15-8691-74b2eb010924', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a7fe18e3-9f20-481e-b223-1b2907709041', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 631.032347] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Creating folder: Project (4840471cf7844a1aac397d7ee7db12d4). Parent ref: group-v837645. 
{{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 631.033308] env[69367]: DEBUG oslo_vmware.api [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Waiting for the task: (returnval){ [ 631.033308] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52ab736c-f1c6-33b7-4d39-43114d6a838d" [ 631.033308] env[69367]: _type = "Task" [ 631.033308] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.034881] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e1ab9962-2da5-4299-9f6a-813203c56b3a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.055340] env[69367]: DEBUG oslo_vmware.api [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52ab736c-f1c6-33b7-4d39-43114d6a838d, 'name': SearchDatastore_Task, 'duration_secs': 0.012891} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.055780] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233741, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.060655] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Created folder: Project (4840471cf7844a1aac397d7ee7db12d4) in parent group-v837645. [ 631.060655] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Creating folder: Instances. Parent ref: group-v837671. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 631.060655] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-353610ee-f63a-4235-ba06-ef7c8d17f934 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.061197] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-188e1bc8-dce2-4472-b812-b27989287687 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.068724] env[69367]: DEBUG oslo_vmware.api [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Waiting for the task: (returnval){ [ 631.068724] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]5214aaec-e254-2ebb-c0bb-40738d46de77" [ 631.068724] env[69367]: _type = "Task" [ 631.068724] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.076136] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Created folder: Instances in parent group-v837671. [ 631.076136] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 631.076774] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 631.077048] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-26ab8932-0058-4bda-ba00-5df3c53788d5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.101684] env[69367]: DEBUG oslo_vmware.api [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5214aaec-e254-2ebb-c0bb-40738d46de77, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.103132] env[69367]: DEBUG nova.compute.manager [req-52ef197f-05c9-4fa3-b002-69c7c9262744 req-61eabfb4-525a-4804-a768-4ec148ed8a6b service nova] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Received event network-vif-plugged-a7fe18e3-9f20-481e-b223-1b2907709041 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 631.103132] env[69367]: DEBUG oslo_concurrency.lockutils [req-52ef197f-05c9-4fa3-b002-69c7c9262744 req-61eabfb4-525a-4804-a768-4ec148ed8a6b service nova] Acquiring lock "92bdb1b1-d8ab-46b2-9037-ee8fea4642ce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.103351] env[69367]: DEBUG oslo_concurrency.lockutils [req-52ef197f-05c9-4fa3-b002-69c7c9262744 req-61eabfb4-525a-4804-a768-4ec148ed8a6b service nova] Lock "92bdb1b1-d8ab-46b2-9037-ee8fea4642ce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.106181] env[69367]: DEBUG oslo_concurrency.lockutils [req-52ef197f-05c9-4fa3-b002-69c7c9262744 req-61eabfb4-525a-4804-a768-4ec148ed8a6b service nova] Lock "92bdb1b1-d8ab-46b2-9037-ee8fea4642ce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 631.106181] env[69367]: DEBUG nova.compute.manager [req-52ef197f-05c9-4fa3-b002-69c7c9262744 req-61eabfb4-525a-4804-a768-4ec148ed8a6b service nova] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] No waiting events found dispatching network-vif-plugged-a7fe18e3-9f20-481e-b223-1b2907709041 
{{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 631.106181] env[69367]: WARNING nova.compute.manager [req-52ef197f-05c9-4fa3-b002-69c7c9262744 req-61eabfb4-525a-4804-a768-4ec148ed8a6b service nova] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Received unexpected event network-vif-plugged-a7fe18e3-9f20-481e-b223-1b2907709041 for instance with vm_state building and task_state spawning. [ 631.110853] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 631.110853] env[69367]: value = "task-4233745" [ 631.110853] env[69367]: _type = "Task" [ 631.110853] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.121145] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233745, 'name': CreateVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.150388] env[69367]: ERROR nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] [req-d4758030-80cb-40fe-b2d9-44b168bd52f4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-d4758030-80cb-40fe-b2d9-44b168bd52f4"}]} [ 631.151068] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.782s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 631.152067] env[69367]: ERROR nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Error updating resources for node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28.: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 631.152067] env[69367]: ERROR nova.compute.manager Traceback (most recent call last): [ 631.152067] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 631.152067] env[69367]: ERROR nova.compute.manager yield [ 631.152067] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 631.152067] env[69367]: ERROR nova.compute.manager self.set_inventory_for_provider( [ 631.152067] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 631.152067] env[69367]: ERROR nova.compute.manager raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 631.152067] env[69367]: ERROR nova.compute.manager nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-d4758030-80cb-40fe-b2d9-44b168bd52f4"}]} [ 631.152067] env[69367]: ERROR nova.compute.manager [ 631.152067] env[69367]: ERROR nova.compute.manager During handling of the above exception, another exception occurred: [ 631.152067] env[69367]: ERROR nova.compute.manager [ 631.152545] env[69367]: ERROR nova.compute.manager Traceback (most recent call last): [ 631.152545] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 11219, in _update_available_resource_for_node [ 631.152545] env[69367]: ERROR nova.compute.manager self.rt.update_available_resource(context, nodename, [ 631.152545] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 965, in update_available_resource [ 631.152545] env[69367]: ERROR nova.compute.manager self._update_available_resource(context, resources, startup=startup) [ 631.152545] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 631.152545] env[69367]: ERROR nova.compute.manager return f(*args, **kwargs) [ 631.152545] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1096, in _update_available_resource [ 631.152545] env[69367]: ERROR nova.compute.manager self._update(context, cn, startup=startup) [ 631.152545] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 631.152545] env[69367]: ERROR nova.compute.manager self._update_to_placement(context, compute_node, startup) [ 631.152545] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 631.152545] env[69367]: ERROR nova.compute.manager return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 631.152545] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 631.152545] env[69367]: 
ERROR nova.compute.manager return attempt.get(self._wrap_exception) [ 631.152545] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 631.152545] env[69367]: ERROR nova.compute.manager six.reraise(self.value[0], self.value[1], self.value[2]) [ 631.152545] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 631.153081] env[69367]: ERROR nova.compute.manager raise value [ 631.153081] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 631.153081] env[69367]: ERROR nova.compute.manager attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 631.153081] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 631.153081] env[69367]: ERROR nova.compute.manager self.reportclient.update_from_provider_tree( [ 631.153081] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 631.153081] env[69367]: ERROR nova.compute.manager with catch_all(pd.uuid): [ 631.153081] env[69367]: ERROR nova.compute.manager File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 631.153081] env[69367]: ERROR nova.compute.manager self.gen.throw(typ, value, traceback) [ 631.153081] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 631.153081] env[69367]: ERROR nova.compute.manager raise exception.ResourceProviderSyncFailed() [ 631.153081] env[69367]: ERROR nova.compute.manager nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 631.153081] env[69367]: ERROR nova.compute.manager [ 631.153081] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.610s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.154838] env[69367]: INFO nova.compute.claims [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 631.230570] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Acquiring lock "a358ce6d-9826-4ddb-8c2f-51bac8db59d4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.230828] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Lock "a358ce6d-9826-4ddb-8c2f-51bac8db59d4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.231054] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Acquiring lock "a358ce6d-9826-4ddb-8c2f-51bac8db59d4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.231312] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Lock "a358ce6d-9826-4ddb-8c2f-51bac8db59d4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.231516] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Lock "a358ce6d-9826-4ddb-8c2f-51bac8db59d4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 631.234838] env[69367]: INFO nova.compute.manager [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Terminating instance [ 631.329250] env[69367]: DEBUG oslo_vmware.api [None req-a9e074d4-0a69-454a-b66f-e71b40fc694f tempest-ServersAaction247Test-2053678796 
tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233740, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.393473] env[69367]: DEBUG oslo_vmware.api [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Task: {'id': task-4233742, 'name': PowerOffVM_Task, 'duration_secs': 0.253885} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.394051] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 631.394289] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 631.394662] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-835f3674-40b2-4b32-af93-6d885c9bdb9f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.446286] env[69367]: DEBUG oslo_concurrency.lockutils [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.476799] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 631.477228] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Deleting contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 631.477527] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Deleting the datastore file [datastore1] 5c7b2127-e875-4222-8148-a2ea60631c25 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 631.477865] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e5b2c26e-e326-4182-8bbf-04fd7135f70f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.488778] env[69367]: DEBUG oslo_vmware.api [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 
tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Waiting for the task: (returnval){ [ 631.488778] env[69367]: value = "task-4233747" [ 631.488778] env[69367]: _type = "Task" [ 631.488778] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.498658] env[69367]: DEBUG oslo_vmware.api [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Task: {'id': task-4233747, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.553196] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233741, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.59878} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.553608] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 631.554498] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aedb973e-5b4c-42cf-8188-43b40e5a78d5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.581833] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] e1c7d100-4ad7-4871-970f-bb7562bfc6fc/e1c7d100-4ad7-4871-970f-bb7562bfc6fc.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 631.586808] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-749689de-4291-4f80-88fb-327fccc2a6aa {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.609500] env[69367]: DEBUG oslo_vmware.api [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5214aaec-e254-2ebb-c0bb-40738d46de77, 'name': SearchDatastore_Task, 'duration_secs': 0.021795} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.610740] env[69367]: DEBUG oslo_concurrency.lockutils [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 631.611087] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9/3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 631.611492] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Waiting for the task: (returnval){ [ 631.611492] env[69367]: value = "task-4233748" [ 631.611492] env[69367]: _type = "Task" [ 631.611492] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.611910] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9f9435ab-36e4-4802-a62a-2c3bdc8d51b6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.629451] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233745, 'name': CreateVM_Task, 'duration_secs': 0.392191} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.634292] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 631.634599] env[69367]: DEBUG oslo_vmware.api [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Waiting for the task: (returnval){ [ 631.634599] env[69367]: value = "task-4233749" [ 631.634599] env[69367]: _type = "Task" [ 631.634599] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.634888] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233748, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.635643] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.635855] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 631.636260] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 631.636637] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b264ea1-ecb3-4136-a2a6-e090ca18f043 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.645782] env[69367]: DEBUG oslo_vmware.api [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Waiting for the task: (returnval){ [ 631.645782] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52a50103-88a4-f9da-5278-9c4fd1797a49" [ 631.645782] env[69367]: _type = "Task" [ 631.645782] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.649805] env[69367]: DEBUG oslo_vmware.api [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Task: {'id': task-4233749, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.662447] env[69367]: DEBUG oslo_vmware.api [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52a50103-88a4-f9da-5278-9c4fd1797a49, 'name': SearchDatastore_Task, 'duration_secs': 0.009807} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.663043] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 631.663347] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 631.663666] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.663905] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 631.664178] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 631.664533] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-97293a0e-8b7b-48ad-a24f-dee88fb48724 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.679085] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 631.679085] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 631.679085] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd43988f-e37a-4a3d-b49b-c960acec8d1a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.686744] env[69367]: DEBUG oslo_vmware.api [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Waiting for the task: (returnval){ [ 631.686744] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52757107-88c9-9e16-b05e-606bb5d973e1" [ 631.686744] env[69367]: _type = "Task" [ 631.686744] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.698587] env[69367]: DEBUG oslo_vmware.api [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52757107-88c9-9e16-b05e-606bb5d973e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.734437] env[69367]: DEBUG nova.compute.manager [req-8bff3d84-2ce2-45f5-893f-3d2e88573d40 req-6c94cccd-0c32-4868-8dd8-2c2ce07d477f service nova] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Received event network-vif-plugged-b6a0688d-a5a2-4937-9ac7-25b53f9b001d {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 631.734437] env[69367]: DEBUG oslo_concurrency.lockutils [req-8bff3d84-2ce2-45f5-893f-3d2e88573d40 req-6c94cccd-0c32-4868-8dd8-2c2ce07d477f service nova] Acquiring lock "3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.734437] env[69367]: DEBUG oslo_concurrency.lockutils [req-8bff3d84-2ce2-45f5-893f-3d2e88573d40 req-6c94cccd-0c32-4868-8dd8-2c2ce07d477f service nova] Lock "3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.734437] env[69367]: DEBUG oslo_concurrency.lockutils [req-8bff3d84-2ce2-45f5-893f-3d2e88573d40 req-6c94cccd-0c32-4868-8dd8-2c2ce07d477f service nova] Lock "3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 631.734437] env[69367]: DEBUG nova.compute.manager [req-8bff3d84-2ce2-45f5-893f-3d2e88573d40 req-6c94cccd-0c32-4868-8dd8-2c2ce07d477f service nova] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] No waiting events found dispatching network-vif-plugged-b6a0688d-a5a2-4937-9ac7-25b53f9b001d {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 631.734703] env[69367]: WARNING nova.compute.manager [req-8bff3d84-2ce2-45f5-893f-3d2e88573d40 req-6c94cccd-0c32-4868-8dd8-2c2ce07d477f service nova] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Received unexpected event 
network-vif-plugged-b6a0688d-a5a2-4937-9ac7-25b53f9b001d for instance with vm_state building and task_state spawning. [ 631.734703] env[69367]: DEBUG nova.compute.manager [req-8bff3d84-2ce2-45f5-893f-3d2e88573d40 req-6c94cccd-0c32-4868-8dd8-2c2ce07d477f service nova] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Received event network-changed-b6a0688d-a5a2-4937-9ac7-25b53f9b001d {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 631.734703] env[69367]: DEBUG nova.compute.manager [req-8bff3d84-2ce2-45f5-893f-3d2e88573d40 req-6c94cccd-0c32-4868-8dd8-2c2ce07d477f service nova] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Refreshing instance network info cache due to event network-changed-b6a0688d-a5a2-4937-9ac7-25b53f9b001d. {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 631.734703] env[69367]: DEBUG oslo_concurrency.lockutils [req-8bff3d84-2ce2-45f5-893f-3d2e88573d40 req-6c94cccd-0c32-4868-8dd8-2c2ce07d477f service nova] Acquiring lock "refresh_cache-3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.734703] env[69367]: DEBUG oslo_concurrency.lockutils [req-8bff3d84-2ce2-45f5-893f-3d2e88573d40 req-6c94cccd-0c32-4868-8dd8-2c2ce07d477f service nova] Acquired lock "refresh_cache-3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 631.734852] env[69367]: DEBUG nova.network.neutron [req-8bff3d84-2ce2-45f5-893f-3d2e88573d40 req-6c94cccd-0c32-4868-8dd8-2c2ce07d477f service nova] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Refreshing network info cache for port b6a0688d-a5a2-4937-9ac7-25b53f9b001d {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 631.739857] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Acquiring lock "refresh_cache-a358ce6d-9826-4ddb-8c2f-51bac8db59d4" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.740140] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Acquired lock "refresh_cache-a358ce6d-9826-4ddb-8c2f-51bac8db59d4" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 631.740299] env[69367]: DEBUG nova.network.neutron [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 631.838169] env[69367]: DEBUG oslo_vmware.api [None req-a9e074d4-0a69-454a-b66f-e71b40fc694f tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233740, 'name': CreateSnapshot_Task, 'duration_secs': 1.050685} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.838169] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e074d4-0a69-454a-b66f-e71b40fc694f tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Created Snapshot of the VM instance {{(pid=69367) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 631.838481] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ffd1a7-447f-493a-b7c2-4b37571faa92 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.004460] env[69367]: DEBUG oslo_vmware.api [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Task: {'id': task-4233747, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139296} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.004739] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 632.004997] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Deleted contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 632.005199] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 632.005391] env[69367]: INFO nova.compute.manager [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Took 1.15 seconds to destroy the instance on the hypervisor. [ 632.005643] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 632.005903] env[69367]: DEBUG nova.compute.manager [-] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 632.006009] env[69367]: DEBUG nova.network.neutron [-] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 632.130269] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233748, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.159457] env[69367]: DEBUG oslo_vmware.api [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Task: {'id': task-4233749, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522937} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.160438] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9/3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 632.160657] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 632.168309] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cd55e4ae-52bf-48d4-81a4-a14be8edf02b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.186113] env[69367]: DEBUG oslo_vmware.api [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Waiting for the task: (returnval){ [ 632.186113] env[69367]: value = "task-4233750" [ 632.186113] env[69367]: _type = "Task" [ 632.186113] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.202597] env[69367]: DEBUG oslo_vmware.api [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52757107-88c9-9e16-b05e-606bb5d973e1, 'name': SearchDatastore_Task, 'duration_secs': 0.010884} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.204987] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04499cff-c6f9-4177-8b1e-5e55be5855d8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.214262] env[69367]: DEBUG oslo_vmware.api [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Waiting for the task: (returnval){ [ 632.214262] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52a42ff5-6601-da83-f570-315c1435af16" [ 632.214262] env[69367]: _type = "Task" [ 632.214262] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.215517] env[69367]: DEBUG nova.scheduler.client.report [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 632.222478] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Acquiring lock "ba4d981a-19f7-41ef-b7d1-a3f3830fe725" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.222478] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Lock "ba4d981a-19f7-41ef-b7d1-a3f3830fe725" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.238699] env[69367]: DEBUG oslo_vmware.api [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52a42ff5-6601-da83-f570-315c1435af16, 'name': SearchDatastore_Task, 'duration_secs': 0.011297} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.240029] env[69367]: DEBUG nova.scheduler.client.report [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 632.240029] env[69367]: DEBUG nova.compute.provider_tree [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 632.243334] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 632.243334] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce/92bdb1b1-d8ab-46b2-9037-ee8fea4642ce.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 632.243334] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-65059ca3-4378-43ff-a151-2dbdf95ddb62 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.256523] env[69367]: DEBUG oslo_vmware.api [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Waiting for the task: (returnval){ [ 632.256523] env[69367]: value = "task-4233751" [ 632.256523] env[69367]: _type = "Task" [ 632.256523] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.257685] env[69367]: DEBUG nova.scheduler.client.report [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 632.274658] env[69367]: DEBUG oslo_vmware.api [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Task: {'id': task-4233751, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.281266] env[69367]: DEBUG nova.network.neutron [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 632.285834] env[69367]: DEBUG nova.scheduler.client.report [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 632.363408] env[69367]: DEBUG nova.compute.manager [None req-a9e074d4-0a69-454a-b66f-e71b40fc694f tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Instance disappeared during snapshot {{(pid=69367) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 632.372145] env[69367]: DEBUG nova.network.neutron [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.573082] env[69367]: DEBUG nova.compute.manager [None req-a9e074d4-0a69-454a-b66f-e71b40fc694f tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Found 0 images (rotation: 2) {{(pid=69367) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 632.641097] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233748, 'name': ReconfigVM_Task, 'duration_secs': 0.543998} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.641618] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Reconfigured VM instance instance-00000006 to attach disk [datastore2] e1c7d100-4ad7-4871-970f-bb7562bfc6fc/e1c7d100-4ad7-4871-970f-bb7562bfc6fc.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 632.642187] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3d6636e1-60a5-4a4c-b91a-6bd779a07d70 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.654861] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Waiting for the task: (returnval){ [ 632.654861] env[69367]: value = "task-4233752" [ 632.654861] env[69367]: _type = "Task" [ 632.654861] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.668616] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233752, 'name': Rename_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.675054] env[69367]: INFO nova.compute.manager [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Rebuilding instance [ 632.705362] env[69367]: DEBUG oslo_vmware.api [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Task: {'id': task-4233750, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090699} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.707049] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 632.708934] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91749360-b4a3-482c-afdd-9dfa9290c5b6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.749233] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9/3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 632.755915] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d3e2e155-80e3-434b-ae00-4a0839fe58dd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.773145] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd65f37c-afba-4c50-955e-0d45de9015da {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.782694] env[69367]: DEBUG nova.compute.manager [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 632.789042] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab3c019-603b-4617-9f4e-c9acdd0b975d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.794242] env[69367]: DEBUG oslo_vmware.api [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Waiting for the task: (returnval){ [ 632.794242] env[69367]: value = "task-4233753" [ 632.794242] env[69367]: _type = "Task" [ 632.794242] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.799274] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c7e4ca2-0cce-419a-90df-a6d10dddfaa1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.810057] env[69367]: DEBUG oslo_vmware.api [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Task: {'id': task-4233751, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473038} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.812099] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce/92bdb1b1-d8ab-46b2-9037-ee8fea4642ce.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 632.812099] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 632.812754] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5a2e6010-7164-423b-8dd3-700109d05961 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.852964] env[69367]: DEBUG oslo_vmware.api [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Task: {'id': task-4233753, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.857141] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e85b483-edc4-46a8-a84f-72530edb6604 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.862809] env[69367]: DEBUG oslo_vmware.api [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Waiting for the task: (returnval){ [ 632.862809] env[69367]: value = "task-4233754" [ 632.862809] env[69367]: _type = "Task" [ 632.862809] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.870600] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b53b4ab1-7071-45fc-b82f-098887f511a0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.877304] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Releasing lock "refresh_cache-a358ce6d-9826-4ddb-8c2f-51bac8db59d4" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 632.877934] env[69367]: DEBUG nova.compute.manager [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Start destroying the instance on the hypervisor. 
{{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 632.878236] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 632.880223] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64957c8f-ba92-46a9-b54c-c4d5328d805d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.887403] env[69367]: DEBUG oslo_vmware.api [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Task: {'id': task-4233754, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.898213] env[69367]: DEBUG nova.compute.provider_tree [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 632.902911] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 632.903518] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-72b5da25-bd71-4486-b02b-f4c91047af25 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.912324] env[69367]: DEBUG oslo_vmware.api [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Waiting for the task: (returnval){ [ 632.912324] env[69367]: value = "task-4233755" [ 632.912324] env[69367]: _type = "Task" [ 632.912324] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.914903] env[69367]: DEBUG nova.network.neutron [req-8bff3d84-2ce2-45f5-893f-3d2e88573d40 req-6c94cccd-0c32-4868-8dd8-2c2ce07d477f service nova] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Updated VIF entry in instance network info cache for port b6a0688d-a5a2-4937-9ac7-25b53f9b001d. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 632.914903] env[69367]: DEBUG nova.network.neutron [req-8bff3d84-2ce2-45f5-893f-3d2e88573d40 req-6c94cccd-0c32-4868-8dd8-2c2ce07d477f service nova] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Updating instance_info_cache with network_info: [{"id": "b6a0688d-a5a2-4937-9ac7-25b53f9b001d", "address": "fa:16:3e:a7:29:36", "network": {"id": "346c9326-4d8c-4f7c-b346-ea12f5dd891e", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1718591398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "06c46b0af1af4a788c5e7159fc2daa3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6a0688d-a5", "ovs_interfaceid": "b6a0688d-a5a2-4937-9ac7-25b53f9b001d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.925642] env[69367]: DEBUG oslo_vmware.api [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233755, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.165174] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233752, 'name': Rename_Task, 'duration_secs': 0.366339} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.165456] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 633.165742] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2b8f27d4-7566-4a97-97d5-33dafa40848f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.172427] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Waiting for the task: (returnval){ [ 633.172427] env[69367]: value = "task-4233756" [ 633.172427] env[69367]: _type = "Task" [ 633.172427] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.183326] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233756, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.283409] env[69367]: DEBUG nova.network.neutron [-] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.316024] env[69367]: DEBUG oslo_vmware.api [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Task: {'id': task-4233753, 'name': ReconfigVM_Task, 'duration_secs': 0.337258} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.316024] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Reconfigured VM instance instance-00000007 to attach disk [datastore2] 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9/3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 633.316024] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1831bceb-88e8-4348-91ed-bcaedde01b9f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.324127] env[69367]: DEBUG oslo_vmware.api [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Waiting for the task: (returnval){ [ 633.324127] env[69367]: value = "task-4233757" [ 633.324127] env[69367]: _type = "Task" [ 633.324127] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.335635] env[69367]: DEBUG oslo_vmware.api [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Task: {'id': task-4233757, 'name': Rename_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.375782] env[69367]: DEBUG oslo_vmware.api [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Task: {'id': task-4233754, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08925} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.376888] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 633.376888] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74cb95b8-5d96-499a-a485-fc6c3fe8e2cf {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.399708] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Reconfiguring VM instance instance-00000008 to attach disk [datastore2] 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce/92bdb1b1-d8ab-46b2-9037-ee8fea4642ce.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 633.400672] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b543abc-5688-4326-9bbd-dfbbd8283352 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.423249] env[69367]: DEBUG oslo_concurrency.lockutils [req-8bff3d84-2ce2-45f5-893f-3d2e88573d40 req-6c94cccd-0c32-4868-8dd8-2c2ce07d477f service nova] Releasing lock "refresh_cache-3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 633.426939] env[69367]: DEBUG oslo_vmware.api [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Waiting for the task: (returnval){ [ 633.426939] env[69367]: value = "task-4233758" [ 633.426939] env[69367]: _type = "Task" [ 633.426939] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.435092] env[69367]: DEBUG oslo_vmware.api [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233755, 'name': PowerOffVM_Task, 'duration_secs': 0.352933} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.435270] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 633.435506] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 633.435742] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-27e8fff7-d04c-45f8-9b00-00da2025842a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.441996] env[69367]: DEBUG oslo_vmware.api [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Task: {'id': task-4233758, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.446694] env[69367]: ERROR nova.scheduler.client.report [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] [req-9e47a873-4272-4043-b66f-fdf82b748dc8] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-9e47a873-4272-4043-b66f-fdf82b748dc8"}]} [ 633.447197] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.295s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 633.448037] env[69367]: ERROR nova.compute.manager [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 633.448037] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] Traceback (most recent call last): [ 633.448037] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 633.448037] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] yield [ 633.448037] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 633.448037] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] self.set_inventory_for_provider( [ 633.448037] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 633.448037] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 633.448272] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-9e47a873-4272-4043-b66f-fdf82b748dc8"}]} [ 633.448272] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] [ 633.448272] 
env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] During handling of the above exception, another exception occurred: [ 633.448272] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] [ 633.448272] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] Traceback (most recent call last): [ 633.448272] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 633.448272] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] with self.rt.instance_claim(context, instance, node, allocs, [ 633.448272] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 633.448272] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] return f(*args, **kwargs) [ 633.448583] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 633.448583] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] self._update(elevated, cn) [ 633.448583] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 633.448583] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] self._update_to_placement(context, compute_node, startup) [ 633.448583] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 633.448583] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 633.448583] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 633.448583] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] return attempt.get(self._wrap_exception) [ 633.448583] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 633.448583] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] six.reraise(self.value[0], self.value[1], self.value[2]) [ 633.448583] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 633.448583] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] raise value [ 633.448583] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 633.448972] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 633.448972] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] File 
"/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 633.448972] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] self.reportclient.update_from_provider_tree( [ 633.448972] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 633.448972] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] with catch_all(pd.uuid): [ 633.448972] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 633.448972] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] self.gen.throw(typ, value, traceback) [ 633.448972] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 633.448972] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] raise exception.ResourceProviderSyncFailed() [ 633.448972] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 633.448972] env[69367]: ERROR nova.compute.manager [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] [ 633.449310] env[69367]: DEBUG nova.compute.utils [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 633.450448] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.329s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 633.452469] env[69367]: INFO nova.compute.claims [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] [instance: 07a65426-e348-4f6f-8898-45409e15c554] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 633.455899] env[69367]: DEBUG nova.compute.manager [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] Build of instance f11c0d77-b53c-4d96-820d-bd3ff3a08955 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 633.459256] env[69367]: DEBUG nova.compute.manager [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 633.459256] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] Acquiring lock "refresh_cache-f11c0d77-b53c-4d96-820d-bd3ff3a08955" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.459256] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] Acquired lock "refresh_cache-f11c0d77-b53c-4d96-820d-bd3ff3a08955" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 633.459256] env[69367]: DEBUG nova.network.neutron [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 633.471407] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 633.471407] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Deleting contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 633.471705] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Deleting the datastore file [datastore1] a358ce6d-9826-4ddb-8c2f-51bac8db59d4 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 633.471855] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a4699c83-0faa-41de-905a-1519e132d38e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.482359] env[69367]: DEBUG oslo_vmware.api [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Waiting for the task: (returnval){ [ 633.482359] env[69367]: value = "task-4233760" [ 633.482359] env[69367]: _type = "Task" [ 633.482359] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.491222] env[69367]: DEBUG oslo_vmware.api [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233760, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.684957] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233756, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.704225] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Acquiring lock "fa4a5dbc-b885-4439-8520-0bfff38438b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 633.704665] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Lock "fa4a5dbc-b885-4439-8520-0bfff38438b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 633.786853] env[69367]: INFO nova.compute.manager [-] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Took 1.78 seconds to deallocate network for instance. [ 633.838349] env[69367]: DEBUG oslo_vmware.api [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Task: {'id': task-4233757, 'name': Rename_Task, 'duration_secs': 0.175478} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.842029] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 633.842029] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9e24de8a-2992-443a-8eef-a339dc05c389 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.846341] env[69367]: DEBUG oslo_vmware.api [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Waiting for the task: (returnval){ [ 633.846341] env[69367]: value = "task-4233761" [ 633.846341] env[69367]: _type = "Task" [ 633.846341] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.865594] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 633.865966] env[69367]: DEBUG oslo_vmware.api [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Task: {'id': task-4233761, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.866277] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf177c6f-980a-47a1-bbfc-e4b7d871fbe2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.875406] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Waiting for the task: (returnval){ [ 633.875406] env[69367]: value = "task-4233762" [ 633.875406] env[69367]: _type = "Task" [ 633.875406] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.887422] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233762, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.945097] env[69367]: DEBUG oslo_vmware.api [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Task: {'id': task-4233758, 'name': ReconfigVM_Task, 'duration_secs': 0.3426} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.945097] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Reconfigured VM instance instance-00000008 to attach disk [datastore2] 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce/92bdb1b1-d8ab-46b2-9037-ee8fea4642ce.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 633.947270] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2e82c8a5-7958-4a10-9e7b-581fdb0b5619 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.955170] env[69367]: DEBUG oslo_vmware.api [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Waiting for the task: (returnval){ [ 633.955170] env[69367]: value = "task-4233763" [ 633.955170] env[69367]: _type = "Task" [ 633.955170] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.971560] env[69367]: DEBUG oslo_vmware.api [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Task: {'id': task-4233763, 'name': Rename_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.994955] env[69367]: DEBUG oslo_vmware.api [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Task: {'id': task-4233760, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192039} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.996457] env[69367]: DEBUG nova.network.neutron [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 633.999713] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 633.999713] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Deleted contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 633.999713] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 634.000233] env[69367]: INFO nova.compute.manager [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Took 1.12 seconds to destroy the instance on the hypervisor. [ 634.000653] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 634.001190] env[69367]: DEBUG nova.compute.manager [-] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 634.001418] env[69367]: DEBUG nova.network.neutron [-] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 634.042017] env[69367]: DEBUG nova.network.neutron [-] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 634.132767] env[69367]: DEBUG nova.network.neutron [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.184497] env[69367]: DEBUG oslo_vmware.api [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233756, 'name': PowerOnVM_Task, 'duration_secs': 0.781214} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.184998] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 634.185296] env[69367]: INFO nova.compute.manager [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Took 14.70 seconds to spawn the instance on the hypervisor. 
[ 634.185446] env[69367]: DEBUG nova.compute.manager [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 634.186273] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19ef4561-ec1d-441f-8679-1b56015f1b2b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.241711] env[69367]: DEBUG nova.compute.manager [None req-c5086775-da23-4bb0-a096-5f5c5477dfe1 tempest-ServerDiagnosticsV248Test-1087128128 tempest-ServerDiagnosticsV248Test-1087128128-project-admin] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 634.243729] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c96269d0-7cba-4182-b5ca-bf13b3205997 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.254152] env[69367]: INFO nova.compute.manager [None req-c5086775-da23-4bb0-a096-5f5c5477dfe1 tempest-ServerDiagnosticsV248Test-1087128128 tempest-ServerDiagnosticsV248Test-1087128128-project-admin] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Retrieving diagnostics [ 634.255361] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4074016a-a987-44d5-b109-a1bcc7f3b36a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.301274] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 634.304240] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "ab365570-ac29-4094-be4c-d49563a465c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 634.304579] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "ab365570-ac29-4094-be4c-d49563a465c8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 634.340052] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "c17525ee-d038-4c81-932b-ed74a6de6cb5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 634.340294] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "c17525ee-d038-4c81-932b-ed74a6de6cb5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 634.359288] env[69367]: DEBUG oslo_vmware.api [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Task: {'id': task-4233761, 'name': PowerOnVM_Task} progress is 87%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.385991] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233762, 'name': PowerOffVM_Task, 'duration_secs': 0.19236} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.386299] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 634.386625] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 634.387736] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a87a374-bd92-4d4e-b82e-234c180067eb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.398037] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 634.402021] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-32cb14cf-0842-40f9-b54d-cac553bde000 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.429821] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 634.430171] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Deleting contents of the VM from datastore datastore1 
{{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 634.430352] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Deleting the datastore file [datastore1] 4e346ed1-36e9-421d-975f-e8bb6f05c0a0 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 634.430674] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8cb6d286-fe21-443a-a619-b3a2a23830f5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.438486] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Waiting for the task: (returnval){ [ 634.438486] env[69367]: value = "task-4233765" [ 634.438486] env[69367]: _type = "Task" [ 634.438486] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.448767] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233765, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.468677] env[69367]: DEBUG oslo_vmware.api [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Task: {'id': task-4233763, 'name': Rename_Task, 'duration_secs': 0.252557} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.469215] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 634.469822] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6fca62d8-c583-4e79-b3ea-7cd55fc6c56f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.478815] env[69367]: DEBUG oslo_vmware.api [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Waiting for the task: (returnval){ [ 634.478815] env[69367]: value = "task-4233766" [ 634.478815] env[69367]: _type = "Task" [ 634.478815] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.486697] env[69367]: DEBUG nova.scheduler.client.report [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 634.493323] env[69367]: DEBUG oslo_vmware.api [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Task: {'id': task-4233766, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.519487] env[69367]: DEBUG nova.scheduler.client.report [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 634.520030] env[69367]: DEBUG nova.compute.provider_tree [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 634.546060] env[69367]: DEBUG nova.network.neutron [-] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.549334] env[69367]: DEBUG nova.scheduler.client.report [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 634.606040] env[69367]: DEBUG nova.scheduler.client.report [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:843}} [ 634.636715] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] Releasing lock "refresh_cache-f11c0d77-b53c-4d96-820d-bd3ff3a08955" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 634.637044] env[69367]: DEBUG nova.compute.manager [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 634.637274] env[69367]: DEBUG nova.compute.manager [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 634.637498] env[69367]: DEBUG nova.network.neutron [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 634.666163] env[69367]: DEBUG nova.network.neutron [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 634.708746] env[69367]: INFO nova.compute.manager [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Took 27.80 seconds to build instance. [ 634.864190] env[69367]: DEBUG oslo_vmware.api [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Task: {'id': task-4233761, 'name': PowerOnVM_Task, 'duration_secs': 0.736569} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.864190] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 634.864190] env[69367]: INFO nova.compute.manager [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Took 12.96 seconds to spawn the instance on the hypervisor. 
[ 634.864190] env[69367]: DEBUG nova.compute.manager [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 634.864190] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f9c710-4660-4bd3-9d7f-2178b110f133 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.957371] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233765, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171474} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.957650] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 634.957864] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Deleted contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 634.958200] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 634.998588] env[69367]: DEBUG oslo_vmware.api [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Task: {'id': task-4233766, 'name': PowerOnVM_Task} progress is 81%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.009638] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5de5b76-d20d-4873-ab34-2d86462f1561 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.022209] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e61432-c976-4bd2-8892-7aabfcdfcd1e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.057806] env[69367]: INFO nova.compute.manager [-] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Took 1.06 seconds to deallocate network for instance. 
[ 635.060843] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93fc8aaa-6a33-4817-ae5e-31dad4fc678a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.074775] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-609098ad-8b3c-4b15-9cb4-27ddfbd37447 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.094899] env[69367]: DEBUG nova.compute.provider_tree [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 635.146870] env[69367]: DEBUG oslo_concurrency.lockutils [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Acquiring lock "1302cad6-55b7-4905-92c1-dfdd37042e30" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.147468] env[69367]: DEBUG oslo_concurrency.lockutils [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Lock "1302cad6-55b7-4905-92c1-dfdd37042e30" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.147468] env[69367]: DEBUG oslo_concurrency.lockutils [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Acquiring lock "1302cad6-55b7-4905-92c1-dfdd37042e30-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.147615] env[69367]: DEBUG oslo_concurrency.lockutils [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Lock "1302cad6-55b7-4905-92c1-dfdd37042e30-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.148091] env[69367]: DEBUG oslo_concurrency.lockutils [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Lock "1302cad6-55b7-4905-92c1-dfdd37042e30-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 635.150243] env[69367]: INFO nova.compute.manager [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Terminating instance [ 635.168511] env[69367]: DEBUG nova.network.neutron [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.211561] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bc7c8f34-4eb1-4127-ba79-caae84f4836e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Lock "e1c7d100-4ad7-4871-970f-bb7562bfc6fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.315s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 635.387061] env[69367]: INFO nova.compute.manager [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Took 27.57 seconds to build instance. [ 635.500622] env[69367]: DEBUG oslo_vmware.api [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Task: {'id': task-4233766, 'name': PowerOnVM_Task, 'duration_secs': 0.925648} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.500898] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 635.503493] env[69367]: INFO nova.compute.manager [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Took 11.22 seconds to spawn the instance on the hypervisor. 
[ 635.503894] env[69367]: DEBUG nova.compute.manager [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 635.505036] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d298565-1547-4fd7-8bcb-b8a6eb63fd0c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.516448] env[69367]: DEBUG nova.compute.manager [req-e7b5d759-fa19-4554-9e60-5d1144b4486a req-2c4068e8-0139-432a-b1c9-38f19f89c475 service nova] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Received event network-changed-a7fe18e3-9f20-481e-b223-1b2907709041 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 635.516654] env[69367]: DEBUG nova.compute.manager [req-e7b5d759-fa19-4554-9e60-5d1144b4486a req-2c4068e8-0139-432a-b1c9-38f19f89c475 service nova] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Refreshing instance network info cache due to event network-changed-a7fe18e3-9f20-481e-b223-1b2907709041. {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 635.516837] env[69367]: DEBUG oslo_concurrency.lockutils [req-e7b5d759-fa19-4554-9e60-5d1144b4486a req-2c4068e8-0139-432a-b1c9-38f19f89c475 service nova] Acquiring lock "refresh_cache-92bdb1b1-d8ab-46b2-9037-ee8fea4642ce" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.516976] env[69367]: DEBUG oslo_concurrency.lockutils [req-e7b5d759-fa19-4554-9e60-5d1144b4486a req-2c4068e8-0139-432a-b1c9-38f19f89c475 service nova] Acquired lock "refresh_cache-92bdb1b1-d8ab-46b2-9037-ee8fea4642ce" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 635.517154] env[69367]: DEBUG nova.network.neutron [req-e7b5d759-fa19-4554-9e60-5d1144b4486a req-2c4068e8-0139-432a-b1c9-38f19f89c475 service nova] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Refreshing network info cache for port a7fe18e3-9f20-481e-b223-1b2907709041 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 635.570807] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.620978] env[69367]: ERROR nova.scheduler.client.report [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] [req-a38f6dc5-eafb-4de7-853a-247b33e2b9f0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-a38f6dc5-eafb-4de7-853a-247b33e2b9f0"}]} [ 635.621728] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.171s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 635.622857] env[69367]: ERROR nova.compute.manager [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] [instance: 07a65426-e348-4f6f-8898-45409e15c554] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 635.622857] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] Traceback (most recent call last): [ 635.622857] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 635.622857] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] yield [ 635.622857] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 635.622857] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] self.set_inventory_for_provider( [ 635.622857] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 635.622857] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 635.623302] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-a38f6dc5-eafb-4de7-853a-247b33e2b9f0"}]} [ 635.623302] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] [ 635.623302] env[69367]: ERROR nova.compute.manager 
[instance: 07a65426-e348-4f6f-8898-45409e15c554] During handling of the above exception, another exception occurred: [ 635.623302] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] [ 635.623302] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] Traceback (most recent call last): [ 635.623302] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 635.623302] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] with self.rt.instance_claim(context, instance, node, allocs, [ 635.623302] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 635.623302] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] return f(*args, **kwargs) [ 635.623615] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 635.623615] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] self._update(elevated, cn) [ 635.623615] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 635.623615] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] self._update_to_placement(context, compute_node, startup) [ 635.623615] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 635.623615] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 635.623615] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 635.623615] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] return attempt.get(self._wrap_exception) [ 635.623615] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 635.623615] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] six.reraise(self.value[0], self.value[1], self.value[2]) [ 635.623615] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 635.623615] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] raise value [ 635.623615] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 635.623957] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 635.623957] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 
1390, in _update_to_placement [ 635.623957] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] self.reportclient.update_from_provider_tree( [ 635.623957] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 635.623957] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] with catch_all(pd.uuid): [ 635.623957] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 635.623957] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] self.gen.throw(typ, value, traceback) [ 635.623957] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 635.623957] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] raise exception.ResourceProviderSyncFailed() [ 635.623957] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 635.623957] env[69367]: ERROR nova.compute.manager [instance: 07a65426-e348-4f6f-8898-45409e15c554] [ 635.626527] env[69367]: DEBUG nova.compute.utils [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] [instance: 07a65426-e348-4f6f-8898-45409e15c554] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 635.627753] env[69367]: DEBUG oslo_concurrency.lockutils [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.985s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.629732] env[69367]: INFO nova.compute.claims [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] [instance: 1df0055c-938e-4048-938c-37590b0138ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 635.632227] env[69367]: DEBUG nova.compute.manager [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] [instance: 07a65426-e348-4f6f-8898-45409e15c554] Build of instance 07a65426-e348-4f6f-8898-45409e15c554 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 635.633296] env[69367]: DEBUG nova.compute.manager [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] [instance: 07a65426-e348-4f6f-8898-45409e15c554] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 635.633296] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] Acquiring lock "refresh_cache-07a65426-e348-4f6f-8898-45409e15c554" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.633296] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] Acquired lock "refresh_cache-07a65426-e348-4f6f-8898-45409e15c554" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 635.633296] env[69367]: DEBUG nova.network.neutron [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] [instance: 07a65426-e348-4f6f-8898-45409e15c554] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 635.655952] env[69367]: DEBUG nova.compute.manager [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Start destroying the instance on the hypervisor. {{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 635.656217] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 635.657429] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e1c983b-44b3-485e-abf0-89862b9709bf {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.669382] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 635.669680] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ceb7b8c4-c759-4d5d-8923-0cdb838e73b7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.672511] env[69367]: INFO nova.compute.manager [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] [instance: f11c0d77-b53c-4d96-820d-bd3ff3a08955] Took 1.03 seconds to deallocate network for instance. 
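The 400 from Placement in the entries above lines up directly with the payload quoted in the log: the provider-tree refresh at 635.094899 reports DISK_GB with max_unit: 0, the inventory update at 635.620978 is rejected because the quoted schema requires max_unit >= 1, and Nova then raises ResourceProviderSyncFailed and re-schedules the build (the earlier refresh at 634.519487 still showed max_unit: 1). The sketch below is illustrative only, not Nova or Placement code; it simply replays that validation against the values quoted in the log using the jsonschema library.

    # Minimal sketch (plain jsonschema, not Nova/Placement code) replaying the
    # failure quoted in the log: DISK_GB.max_unit == 0 violates the inventory
    # schema's "minimum: 1" constraint, so the PUT to
    # /resource_providers/<uuid>/inventories returns HTTP 400.
    import jsonschema

    # Schema fragment exactly as quoted in the 400 response body.
    MAX_UNIT_SCHEMA = {"type": "integer", "maximum": 2147483647, "minimum": 1}

    # Inventory payload as logged at 635.094899 / 635.620978.
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                      "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 0,
                    "step_size": 1, "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        try:
            jsonschema.validate(inv["max_unit"], MAX_UNIT_SCHEMA)
        except jsonschema.ValidationError as exc:
            # Only DISK_GB trips this: "0 is less than the minimum of 1"
            print(f"{rc}: {exc.message}")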
[ 635.683155] env[69367]: DEBUG oslo_vmware.api [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Waiting for the task: (returnval){ [ 635.683155] env[69367]: value = "task-4233767" [ 635.683155] env[69367]: _type = "Task" [ 635.683155] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.694683] env[69367]: DEBUG oslo_vmware.api [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Task: {'id': task-4233767, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.715373] env[69367]: DEBUG nova.compute.manager [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 635.890262] env[69367]: DEBUG oslo_concurrency.lockutils [None req-54948762-f0b7-4641-9578-c30f1e749672 tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Lock "3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.085s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 636.001482] env[69367]: DEBUG nova.virt.hardware [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 636.001759] env[69367]: DEBUG nova.virt.hardware [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 636.001902] env[69367]: DEBUG nova.virt.hardware [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 636.002123] env[69367]: DEBUG nova.virt.hardware [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Flavor pref 0:0:0 {{(pid=69367) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 636.002253] env[69367]: DEBUG nova.virt.hardware [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 636.002450] env[69367]: DEBUG nova.virt.hardware [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 636.002603] env[69367]: DEBUG nova.virt.hardware [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 636.002770] env[69367]: DEBUG nova.virt.hardware [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 636.002944] env[69367]: DEBUG nova.virt.hardware [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 636.003820] env[69367]: DEBUG nova.virt.hardware [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 636.004267] env[69367]: DEBUG nova.virt.hardware [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 636.005188] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab6f5e89-8a5b-43b6-92a2-3235cd943e9a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.015696] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f2f4e44-956c-4ff3-bd89-5d74cd213981 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.036193] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Instance VIF info [] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 636.042624] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 
tempest-ServersAdmin275Test-256247193-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 636.044790] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 636.045380] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c2ced011-ca83-46e6-9ea4-fac3d022d8c2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.060508] env[69367]: INFO nova.compute.manager [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Took 26.98 seconds to build instance. [ 636.070593] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 636.070593] env[69367]: value = "task-4233768" [ 636.070593] env[69367]: _type = "Task" [ 636.070593] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.079044] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233768, 'name': CreateVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.183135] env[69367]: DEBUG nova.network.neutron [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] [instance: 07a65426-e348-4f6f-8898-45409e15c554] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 636.196990] env[69367]: DEBUG oslo_vmware.api [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Task: {'id': task-4233767, 'name': PowerOffVM_Task, 'duration_secs': 0.268816} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.198188] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 636.198437] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 636.198730] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ebc3d386-57c6-4c9b-a396-495566096423 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.248304] env[69367]: DEBUG oslo_concurrency.lockutils [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 636.282538] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 636.282842] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 636.283011] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Deleting the datastore file [datastore2] 1302cad6-55b7-4905-92c1-dfdd37042e30 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 636.283910] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2095d9e6-d0b1-43b2-b874-4a686e2f572b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.295390] env[69367]: DEBUG oslo_vmware.api [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Waiting for the task: (returnval){ [ 636.295390] env[69367]: value = "task-4233770" [ 636.295390] env[69367]: _type = "Task" [ 636.295390] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.305885] env[69367]: DEBUG oslo_vmware.api [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Task: {'id': task-4233770, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.394264] env[69367]: DEBUG nova.compute.manager [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 636.414993] env[69367]: DEBUG nova.network.neutron [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] [instance: 07a65426-e348-4f6f-8898-45409e15c554] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.563627] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5854b3bc-c81a-4206-8ac8-88051bb306a9 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Lock "92bdb1b1-d8ab-46b2-9037-ee8fea4642ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.514s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 636.581272] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233768, 'name': CreateVM_Task} progress is 99%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.624439] env[69367]: DEBUG nova.network.neutron [req-e7b5d759-fa19-4554-9e60-5d1144b4486a req-2c4068e8-0139-432a-b1c9-38f19f89c475 service nova] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Updated VIF entry in instance network info cache for port a7fe18e3-9f20-481e-b223-1b2907709041. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 636.624601] env[69367]: DEBUG nova.network.neutron [req-e7b5d759-fa19-4554-9e60-5d1144b4486a req-2c4068e8-0139-432a-b1c9-38f19f89c475 service nova] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Updating instance_info_cache with network_info: [{"id": "a7fe18e3-9f20-481e-b223-1b2907709041", "address": "fa:16:3e:23:f7:a6", "network": {"id": "afabf187-1494-4413-aabe-98c264714fc7", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-2057114382-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4840471cf7844a1aac397d7ee7db12d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d377d75-3add-4a15-8691-74b2eb010924", "external-id": "nsx-vlan-transportzone-71", "segmentation_id": 71, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7fe18e3-9f", "ovs_interfaceid": "a7fe18e3-9f20-481e-b223-1b2907709041", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.668311] env[69367]: DEBUG nova.scheduler.client.report [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 636.693326] env[69367]: DEBUG nova.scheduler.client.report [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 636.693553] env[69367]: DEBUG nova.compute.provider_tree [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 636.709884] env[69367]: DEBUG 
nova.scheduler.client.report [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 636.724844] env[69367]: INFO nova.scheduler.client.report [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] Deleted allocations for instance f11c0d77-b53c-4d96-820d-bd3ff3a08955 [ 636.738022] env[69367]: DEBUG nova.scheduler.client.report [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 636.768967] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Acquiring lock "937c05e9-06f1-4a5f-9f8c-ac40c262ce4e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 636.768967] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Lock "937c05e9-06f1-4a5f-9f8c-ac40c262ce4e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 636.768967] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Acquiring lock "937c05e9-06f1-4a5f-9f8c-ac40c262ce4e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 636.768967] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Lock "937c05e9-06f1-4a5f-9f8c-ac40c262ce4e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 636.769253] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Lock "937c05e9-06f1-4a5f-9f8c-ac40c262ce4e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 636.771419] env[69367]: INFO nova.compute.manager [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Terminating instance [ 636.812119] env[69367]: DEBUG oslo_vmware.api [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Task: {'id': task-4233770, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.263999} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.812527] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 636.812736] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 636.812854] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 636.813039] env[69367]: INFO nova.compute.manager [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Took 1.16 seconds to destroy the instance on the hypervisor. [ 636.813292] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 636.816061] env[69367]: DEBUG nova.compute.manager [-] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 636.816217] env[69367]: DEBUG nova.network.neutron [-] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 636.919076] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] Releasing lock "refresh_cache-07a65426-e348-4f6f-8898-45409e15c554" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 636.919335] env[69367]: DEBUG nova.compute.manager [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 636.920409] env[69367]: DEBUG nova.compute.manager [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] [instance: 07a65426-e348-4f6f-8898-45409e15c554] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 636.920665] env[69367]: DEBUG nova.network.neutron [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] [instance: 07a65426-e348-4f6f-8898-45409e15c554] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 636.924419] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 636.998728] env[69367]: DEBUG nova.network.neutron [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] [instance: 07a65426-e348-4f6f-8898-45409e15c554] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 637.067150] env[69367]: DEBUG nova.compute.manager [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 637.093465] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233768, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.132142] env[69367]: DEBUG oslo_concurrency.lockutils [req-e7b5d759-fa19-4554-9e60-5d1144b4486a req-2c4068e8-0139-432a-b1c9-38f19f89c475 service nova] Releasing lock "refresh_cache-92bdb1b1-d8ab-46b2-9037-ee8fea4642ce" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 637.133092] env[69367]: DEBUG nova.compute.manager [req-e7b5d759-fa19-4554-9e60-5d1144b4486a req-2c4068e8-0139-432a-b1c9-38f19f89c475 service nova] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Received event network-vif-deleted-4051893d-10cc-4cb9-8e30-089e8d3d4286 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 637.154653] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff2afe61-c1fa-490b-acc5-362b8d05a409 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.167731] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-947af3f4-7e81-47a1-afcf-951ee62fd8a0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.206732] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-137182cd-ee00-4f98-b726-909d848c7e15 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.215761] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae830a7a-6d52-4b9d-affb-a712c081f3e2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.231446] env[69367]: DEBUG nova.compute.provider_tree [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 637.239446] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3682f758-1480-4596-869e-15f72e534e72 tempest-ServersWithSpecificFlavorTestJSON-1357925327 tempest-ServersWithSpecificFlavorTestJSON-1357925327-project-member] Lock "f11c0d77-b53c-4d96-820d-bd3ff3a08955" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.429s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 637.279383] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Acquiring lock "refresh_cache-937c05e9-06f1-4a5f-9f8c-ac40c262ce4e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.279519] env[69367]: DEBUG 
oslo_concurrency.lockutils [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Acquired lock "refresh_cache-937c05e9-06f1-4a5f-9f8c-ac40c262ce4e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 637.280036] env[69367]: DEBUG nova.network.neutron [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 637.501461] env[69367]: DEBUG nova.network.neutron [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] [instance: 07a65426-e348-4f6f-8898-45409e15c554] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.588702] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233768, 'name': CreateVM_Task} progress is 99%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.608261] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 637.742861] env[69367]: DEBUG nova.compute.manager [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 637.752751] env[69367]: ERROR nova.scheduler.client.report [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] [req-fda33f05-c314-4048-88f0-68e3a91ea0d6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-fda33f05-c314-4048-88f0-68e3a91ea0d6"}]} [ 637.753132] env[69367]: DEBUG oslo_concurrency.lockutils [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.125s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 637.753817] env[69367]: ERROR nova.compute.manager [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] [instance: 1df0055c-938e-4048-938c-37590b0138ac] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 637.753817] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] Traceback (most recent call last): [ 637.753817] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 637.753817] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] yield [ 637.753817] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 637.753817] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] self.set_inventory_for_provider( [ 637.753817] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 637.753817] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 637.754263] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-fda33f05-c314-4048-88f0-68e3a91ea0d6"}]} [ 637.754263] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] [ 637.754263] env[69367]: ERROR nova.compute.manager 
[instance: 1df0055c-938e-4048-938c-37590b0138ac] During handling of the above exception, another exception occurred: [ 637.754263] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] [ 637.754263] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] Traceback (most recent call last): [ 637.754263] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 637.754263] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] with self.rt.instance_claim(context, instance, node, allocs, [ 637.754263] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 637.754263] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] return f(*args, **kwargs) [ 637.754544] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 637.754544] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] self._update(elevated, cn) [ 637.754544] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 637.754544] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] self._update_to_placement(context, compute_node, startup) [ 637.754544] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 637.754544] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 637.754544] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 637.754544] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] return attempt.get(self._wrap_exception) [ 637.754544] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 637.754544] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] six.reraise(self.value[0], self.value[1], self.value[2]) [ 637.754544] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 637.754544] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] raise value [ 637.754544] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 637.754869] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 637.754869] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 
1390, in _update_to_placement [ 637.754869] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] self.reportclient.update_from_provider_tree( [ 637.754869] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 637.754869] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] with catch_all(pd.uuid): [ 637.754869] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 637.754869] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] self.gen.throw(typ, value, traceback) [ 637.754869] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 637.754869] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] raise exception.ResourceProviderSyncFailed() [ 637.754869] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 637.754869] env[69367]: ERROR nova.compute.manager [instance: 1df0055c-938e-4048-938c-37590b0138ac] [ 637.755156] env[69367]: DEBUG nova.compute.utils [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] [instance: 1df0055c-938e-4048-938c-37590b0138ac] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 637.759483] env[69367]: DEBUG nova.compute.manager [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] [instance: 1df0055c-938e-4048-938c-37590b0138ac] Build of instance 1df0055c-938e-4048-938c-37590b0138ac was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 637.759949] env[69367]: DEBUG nova.compute.manager [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] [instance: 1df0055c-938e-4048-938c-37590b0138ac] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 637.761495] env[69367]: DEBUG oslo_concurrency.lockutils [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] Acquiring lock "refresh_cache-1df0055c-938e-4048-938c-37590b0138ac" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.761495] env[69367]: DEBUG oslo_concurrency.lockutils [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] Acquired lock "refresh_cache-1df0055c-938e-4048-938c-37590b0138ac" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 637.761495] env[69367]: DEBUG nova.network.neutron [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] [instance: 1df0055c-938e-4048-938c-37590b0138ac] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 637.761888] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.854s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 637.765191] env[69367]: INFO nova.compute.claims [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 637.813262] env[69367]: DEBUG nova.network.neutron [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 637.831571] env[69367]: DEBUG nova.network.neutron [-] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.920153] env[69367]: DEBUG nova.network.neutron [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.010269] env[69367]: INFO nova.compute.manager [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] [instance: 07a65426-e348-4f6f-8898-45409e15c554] Took 1.09 seconds to deallocate network for instance. [ 638.087899] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233768, 'name': CreateVM_Task, 'duration_secs': 1.527058} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.088668] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 638.089632] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.089963] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 638.090335] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 638.090625] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f173b5c1-2c2e-4a44-9117-35ac8b226988 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.099056] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Waiting for the task: (returnval){ [ 638.099056] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52ca15fb-f35d-856f-193a-fa29ba00e7e1" [ 638.099056] env[69367]: _type = "Task" [ 638.099056] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.112892] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52ca15fb-f35d-856f-193a-fa29ba00e7e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.185923] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Acquiring lock "92c27615-d377-492f-a9db-ff45b2e71537" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 638.185923] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Lock "92c27615-d377-492f-a9db-ff45b2e71537" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 638.278497] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 638.308827] env[69367]: DEBUG nova.network.neutron [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] [instance: 1df0055c-938e-4048-938c-37590b0138ac] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 638.335143] env[69367]: INFO nova.compute.manager [-] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Took 1.52 seconds to deallocate network for instance. [ 638.422491] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Releasing lock "refresh_cache-937c05e9-06f1-4a5f-9f8c-ac40c262ce4e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 638.423884] env[69367]: DEBUG nova.compute.manager [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Start destroying the instance on the hypervisor. 
{{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 638.424116] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 638.425015] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3237e2-31d8-4db3-af26-3ab430092244 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.437431] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 638.437686] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f680980c-628e-472e-b098-5823d876520f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.450015] env[69367]: DEBUG oslo_vmware.api [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Waiting for the task: (returnval){ [ 638.450015] env[69367]: value = "task-4233771" [ 638.450015] env[69367]: _type = "Task" [ 638.450015] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.453486] env[69367]: DEBUG nova.network.neutron [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] [instance: 1df0055c-938e-4048-938c-37590b0138ac] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.465149] env[69367]: DEBUG oslo_vmware.api [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': task-4233771, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.624200] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52ca15fb-f35d-856f-193a-fa29ba00e7e1, 'name': SearchDatastore_Task, 'duration_secs': 0.014468} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.624683] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 638.624831] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 638.625262] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.625262] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 638.626092] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 638.626469] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7fd6022e-c183-473e-8737-877854a077a9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.640721] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 638.640931] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 638.641898] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65308c2c-ee23-448f-8343-2958d326ef52 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.649106] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Waiting for the task: (returnval){ [ 638.649106] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52b86e91-5368-8b27-3431-38471de0d7ae" [ 638.649106] env[69367]: _type = "Task" [ 638.649106] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.675150] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52b86e91-5368-8b27-3431-38471de0d7ae, 'name': SearchDatastore_Task, 'duration_secs': 0.013911} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.677197] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c824654e-f0b4-4f8c-86b9-505e28b2f35b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.686388] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Waiting for the task: (returnval){ [ 638.686388] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]5239109a-9f4a-8cb3-b285-ee99de636536" [ 638.686388] env[69367]: _type = "Task" [ 638.686388] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.698640] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5239109a-9f4a-8cb3-b285-ee99de636536, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.799436] env[69367]: DEBUG nova.scheduler.client.report [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 638.819348] env[69367]: DEBUG nova.scheduler.client.report [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 638.820017] env[69367]: DEBUG nova.compute.provider_tree [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 638.836689] env[69367]: DEBUG nova.scheduler.client.report [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 638.844242] env[69367]: DEBUG oslo_concurrency.lockutils [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 638.857363] env[69367]: DEBUG nova.scheduler.client.report [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 638.960092] env[69367]: DEBUG oslo_concurrency.lockutils [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] Releasing lock 
"refresh_cache-1df0055c-938e-4048-938c-37590b0138ac" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 638.961840] env[69367]: DEBUG nova.compute.manager [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 638.961840] env[69367]: DEBUG nova.compute.manager [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] [instance: 1df0055c-938e-4048-938c-37590b0138ac] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 638.962630] env[69367]: DEBUG nova.network.neutron [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] [instance: 1df0055c-938e-4048-938c-37590b0138ac] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 638.970018] env[69367]: DEBUG oslo_vmware.api [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': task-4233771, 'name': PowerOffVM_Task, 'duration_secs': 0.236417} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.970789] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 638.970944] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 638.971301] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-763d58a0-3b71-43e4-9ffd-2017dfaca5f4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.001510] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 639.001761] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 639.002221] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Deleting the datastore file [datastore2] 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 639.005777] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-662ea2c2-45b6-4973-97bd-965572908323 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.013521] env[69367]: DEBUG oslo_vmware.api [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Waiting for the task: (returnval){ [ 639.013521] env[69367]: value = "task-4233773" [ 639.013521] env[69367]: _type = "Task" [ 639.013521] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.029533] env[69367]: DEBUG oslo_vmware.api [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': task-4233773, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.061892] env[69367]: INFO nova.scheduler.client.report [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] Deleted allocations for instance 07a65426-e348-4f6f-8898-45409e15c554 [ 639.182686] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquiring lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 639.183029] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 639.205218] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5239109a-9f4a-8cb3-b285-ee99de636536, 'name': SearchDatastore_Task, 'duration_secs': 0.014707} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.205571] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 639.206188] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] 4e346ed1-36e9-421d-975f-e8bb6f05c0a0/4e346ed1-36e9-421d-975f-e8bb6f05c0a0.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 639.206188] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4df5713b-ee2b-4b02-9f3e-27dd1b4aaa82 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.218172] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Waiting for the task: (returnval){ [ 639.218172] env[69367]: value = "task-4233774" [ 639.218172] env[69367]: _type = "Task" [ 639.218172] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.228615] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233774, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.245977] env[69367]: DEBUG nova.network.neutron [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] [instance: 1df0055c-938e-4048-938c-37590b0138ac] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 639.270318] env[69367]: DEBUG oslo_concurrency.lockutils [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Acquiring lock "92bdb1b1-d8ab-46b2-9037-ee8fea4642ce" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 639.270949] env[69367]: DEBUG oslo_concurrency.lockutils [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Lock "92bdb1b1-d8ab-46b2-9037-ee8fea4642ce" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 639.271265] env[69367]: DEBUG oslo_concurrency.lockutils [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Acquiring lock "92bdb1b1-d8ab-46b2-9037-ee8fea4642ce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 639.271644] env[69367]: DEBUG oslo_concurrency.lockutils [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Lock "92bdb1b1-d8ab-46b2-9037-ee8fea4642ce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 639.271923] env[69367]: DEBUG oslo_concurrency.lockutils [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Lock "92bdb1b1-d8ab-46b2-9037-ee8fea4642ce-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 639.276739] env[69367]: INFO nova.compute.manager [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Terminating instance [ 639.279605] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db29576-bf40-46c0-b1be-269faa3b4e98 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.289917] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c90dc3-63f8-44e2-a79d-de5a470937a9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.328167] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f7ffda-7267-4fe4-bb09-26ec8061ba11 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.338583] env[69367]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e1912f-39a7-4500-bed6-99fa37ede360 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.356984] env[69367]: DEBUG nova.compute.provider_tree [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 639.527907] env[69367]: DEBUG oslo_vmware.api [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Task: {'id': task-4233773, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.262067} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.528334] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 639.528484] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 639.528701] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 639.528917] env[69367]: INFO nova.compute.manager [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Took 1.10 seconds to destroy the instance on the hypervisor. [ 639.529221] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 639.529451] env[69367]: DEBUG nova.compute.manager [-] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 639.529567] env[69367]: DEBUG nova.network.neutron [-] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 639.560892] env[69367]: DEBUG nova.network.neutron [-] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 639.570042] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7c66dfce-abbe-4ff0-97b3-5ac8ccf941b4 tempest-ServerAddressesTestJSON-1140368918 tempest-ServerAddressesTestJSON-1140368918-project-member] Lock "07a65426-e348-4f6f-8898-45409e15c554" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.025s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 639.730856] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233774, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.748950] env[69367]: DEBUG nova.network.neutron [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] [instance: 1df0055c-938e-4048-938c-37590b0138ac] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.784330] env[69367]: DEBUG nova.compute.manager [req-3e1c23f9-f45d-4c08-bd92-a7dcbbca7874 req-44cc974c-b5f3-4167-aeb3-8f25a1a03edd service nova] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Received event network-vif-deleted-c1cd0433-e331-4e76-af42-c5cd0421b041 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 639.785085] env[69367]: DEBUG nova.compute.manager [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Start destroying the instance on the hypervisor. 
{{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 639.785203] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 639.786566] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7403ca7e-4405-4fe6-92d8-1b761b7c48af {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.796196] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 639.796572] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-59e2a012-8996-44d9-8acf-2e96bcd46ff5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.805782] env[69367]: DEBUG oslo_vmware.api [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Waiting for the task: (returnval){ [ 639.805782] env[69367]: value = "task-4233775" [ 639.805782] env[69367]: _type = "Task" [ 639.805782] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.822741] env[69367]: DEBUG oslo_vmware.api [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Task: {'id': task-4233775, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.892181] env[69367]: ERROR nova.scheduler.client.report [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [req-23342b0e-5db9-42f1-98c3-116863eae0df] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-23342b0e-5db9-42f1-98c3-116863eae0df"}]} [ 639.892617] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.131s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 639.893525] env[69367]: ERROR nova.compute.manager [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 639.893525] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] Traceback (most recent call last): [ 639.893525] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 639.893525] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] yield [ 639.893525] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 639.893525] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] self.set_inventory_for_provider( [ 639.893525] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 639.893525] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 639.893969] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-23342b0e-5db9-42f1-98c3-116863eae0df"}]} [ 639.893969] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] [ 639.893969] env[69367]: ERROR nova.compute.manager [instance: 
db11f64c-0881-4a06-ba8d-6f52ec7fab16] During handling of the above exception, another exception occurred: [ 639.893969] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] [ 639.893969] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] Traceback (most recent call last): [ 639.893969] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 639.893969] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] with self.rt.instance_claim(context, instance, node, allocs, [ 639.893969] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 639.893969] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] return f(*args, **kwargs) [ 639.894527] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 639.894527] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] self._update(elevated, cn) [ 639.894527] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 639.894527] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] self._update_to_placement(context, compute_node, startup) [ 639.894527] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 639.894527] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 639.894527] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 639.894527] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] return attempt.get(self._wrap_exception) [ 639.894527] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 639.894527] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] six.reraise(self.value[0], self.value[1], self.value[2]) [ 639.894527] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 639.894527] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] raise value [ 639.894527] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 639.895093] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 639.895093] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in 
_update_to_placement [ 639.895093] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] self.reportclient.update_from_provider_tree( [ 639.895093] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 639.895093] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] with catch_all(pd.uuid): [ 639.895093] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 639.895093] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] self.gen.throw(typ, value, traceback) [ 639.895093] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 639.895093] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] raise exception.ResourceProviderSyncFailed() [ 639.895093] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 639.895093] env[69367]: ERROR nova.compute.manager [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] [ 639.895627] env[69367]: DEBUG nova.compute.utils [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 639.896760] env[69367]: DEBUG oslo_concurrency.lockutils [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.712s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 639.899091] env[69367]: INFO nova.compute.claims [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 639.908871] env[69367]: DEBUG nova.compute.manager [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] Build of instance db11f64c-0881-4a06-ba8d-6f52ec7fab16 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 639.909574] env[69367]: DEBUG nova.compute.manager [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 639.909662] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Acquiring lock "refresh_cache-db11f64c-0881-4a06-ba8d-6f52ec7fab16" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.910068] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Acquired lock "refresh_cache-db11f64c-0881-4a06-ba8d-6f52ec7fab16" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 639.910141] env[69367]: DEBUG nova.network.neutron [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 640.063529] env[69367]: DEBUG nova.network.neutron [-] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.073486] env[69367]: DEBUG nova.compute.manager [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 640.120619] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] Acquiring lock "8001cca4-9b9f-4425-b6e4-d27866395886" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 640.120853] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] Lock "8001cca4-9b9f-4425-b6e4-d27866395886" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 640.233115] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233774, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.8755} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.234631] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] 4e346ed1-36e9-421d-975f-e8bb6f05c0a0/4e346ed1-36e9-421d-975f-e8bb6f05c0a0.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 640.234631] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 640.234631] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eeca7b7e-9e2a-4524-a223-97fb5e3dd42f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.247567] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Waiting for the task: (returnval){ [ 640.247567] env[69367]: value = "task-4233776" [ 640.247567] env[69367]: _type = "Task" [ 640.247567] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.264989] env[69367]: INFO nova.compute.manager [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] [instance: 1df0055c-938e-4048-938c-37590b0138ac] Took 1.30 seconds to deallocate network for instance. [ 640.267621] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233776, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.318892] env[69367]: DEBUG oslo_vmware.api [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Task: {'id': task-4233775, 'name': PowerOffVM_Task, 'duration_secs': 0.333747} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.321751] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 640.322144] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 640.327248] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6d708fe-64ef-40d0-b7a9-74f791504d3a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.403250] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 640.403505] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 640.403686] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Deleting the datastore file [datastore2] 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 640.403966] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-46f6dd67-0ccd-434a-8c31-7dd7593ecd26 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.416545] env[69367]: DEBUG oslo_vmware.api [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Waiting for the task: (returnval){ [ 640.416545] env[69367]: value = "task-4233778" [ 640.416545] env[69367]: _type = "Task" [ 640.416545] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.434296] env[69367]: DEBUG oslo_vmware.api [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Task: {'id': task-4233778, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.456861] env[69367]: DEBUG nova.network.neutron [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 640.566550] env[69367]: INFO nova.compute.manager [-] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Took 1.04 seconds to deallocate network for instance. [ 640.579300] env[69367]: DEBUG nova.network.neutron [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.613582] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 640.761971] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233776, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071734} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.762798] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 640.763406] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93212ec3-6ce8-4fc8-8160-520cef8d62df {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.791888] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] 4e346ed1-36e9-421d-975f-e8bb6f05c0a0/4e346ed1-36e9-421d-975f-e8bb6f05c0a0.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 640.792292] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f6aa6fe-9c9c-47a4-be4b-36c2b6b119c5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.816167] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Waiting for the task: (returnval){ [ 640.816167] env[69367]: value = "task-4233779" [ 640.816167] env[69367]: _type = "Task" [ 
640.816167] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.826418] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233779, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.934466] env[69367]: DEBUG oslo_vmware.api [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Task: {'id': task-4233778, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.951650] env[69367]: DEBUG nova.scheduler.client.report [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 640.973746] env[69367]: DEBUG nova.scheduler.client.report [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 640.974179] env[69367]: DEBUG nova.compute.provider_tree [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 640.978065] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Acquiring lock "f66c0467-a408-4e56-abdf-2c19cc3d9c11" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 640.978065] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Lock "f66c0467-a408-4e56-abdf-2c19cc3d9c11" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 640.989291] env[69367]: DEBUG nova.scheduler.client.report [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 641.004373] env[69367]: DEBUG nova.compute.manager [req-298c00ef-3682-4b71-84f8-8b81851b5a30 req-524e1a41-e613-4984-bd95-c9647b26aaac service nova] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Received event network-changed-013efad5-0b57-43e9-b662-10e31d24d8af {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 641.004704] env[69367]: DEBUG nova.compute.manager [req-298c00ef-3682-4b71-84f8-8b81851b5a30 req-524e1a41-e613-4984-bd95-c9647b26aaac service nova] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Refreshing instance network info cache due to event network-changed-013efad5-0b57-43e9-b662-10e31d24d8af. {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 641.004792] env[69367]: DEBUG oslo_concurrency.lockutils [req-298c00ef-3682-4b71-84f8-8b81851b5a30 req-524e1a41-e613-4984-bd95-c9647b26aaac service nova] Acquiring lock "refresh_cache-e1c7d100-4ad7-4871-970f-bb7562bfc6fc" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.004906] env[69367]: DEBUG oslo_concurrency.lockutils [req-298c00ef-3682-4b71-84f8-8b81851b5a30 req-524e1a41-e613-4984-bd95-c9647b26aaac service nova] Acquired lock "refresh_cache-e1c7d100-4ad7-4871-970f-bb7562bfc6fc" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 641.005469] env[69367]: DEBUG nova.network.neutron [req-298c00ef-3682-4b71-84f8-8b81851b5a30 req-524e1a41-e613-4984-bd95-c9647b26aaac service nova] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Refreshing network info cache for port 013efad5-0b57-43e9-b662-10e31d24d8af {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 641.022672] env[69367]: DEBUG nova.scheduler.client.report [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 641.073935] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 641.083334] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Releasing lock 
"refresh_cache-db11f64c-0881-4a06-ba8d-6f52ec7fab16" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 641.083619] env[69367]: DEBUG nova.compute.manager [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 641.083786] env[69367]: DEBUG nova.compute.manager [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 641.083894] env[69367]: DEBUG nova.network.neutron [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 641.107544] env[69367]: DEBUG nova.network.neutron [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 641.301707] env[69367]: INFO nova.scheduler.client.report [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] Deleted allocations for instance 1df0055c-938e-4048-938c-37590b0138ac [ 641.330397] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233779, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.404384] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5be75e1b-b965-4789-9324-7b259a06bf7b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.413372] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036d891b-3b3f-4672-ab30-4549c892e9a8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.451122] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c987dee6-54ea-4259-812c-3ee9e0952346 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.459651] env[69367]: DEBUG oslo_vmware.api [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Task: {'id': task-4233778, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.538654} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.461860] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 641.462118] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 641.462313] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 641.462486] env[69367]: INFO nova.compute.manager [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Took 1.68 seconds to destroy the instance on the hypervisor. [ 641.462723] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 641.462991] env[69367]: DEBUG nova.compute.manager [-] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 641.463162] env[69367]: DEBUG nova.network.neutron [-] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 641.465927] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5987b7f-3748-402b-9bff-677ae062ab1f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.480988] env[69367]: DEBUG nova.compute.provider_tree [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 641.610445] env[69367]: DEBUG nova.network.neutron [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.812408] env[69367]: DEBUG oslo_concurrency.lockutils [None req-af1ecc9b-a257-426e-95f0-d09e8a0d0989 tempest-ServersTestBootFromVolume-945135387 tempest-ServersTestBootFromVolume-945135387-project-member] Lock "1df0055c-938e-4048-938c-37590b0138ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.315s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 641.828192] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233779, 'name': ReconfigVM_Task, 'duration_secs': 0.995462} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.829143] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Reconfigured VM instance instance-00000005 to attach disk [datastore1] 4e346ed1-36e9-421d-975f-e8bb6f05c0a0/4e346ed1-36e9-421d-975f-e8bb6f05c0a0.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 641.829779] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5c588c30-439d-4c38-8167-1323ffc6d5a0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.841792] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Waiting for the task: (returnval){ [ 641.841792] env[69367]: value = "task-4233780" [ 641.841792] env[69367]: _type = "Task" [ 641.841792] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.855353] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233780, 'name': Rename_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.014565] env[69367]: ERROR nova.scheduler.client.report [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [req-281dd924-c76a-46e4-ae27-f54da2ae9fa3] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-281dd924-c76a-46e4-ae27-f54da2ae9fa3"}]} [ 642.014968] env[69367]: DEBUG oslo_concurrency.lockutils [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.119s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 642.015587] env[69367]: ERROR nova.compute.manager [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 642.015587] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] Traceback (most recent call last): [ 642.015587] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 642.015587] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] yield [ 642.015587] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 642.015587] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] self.set_inventory_for_provider( [ 642.015587] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 642.015587] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 642.016110] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-281dd924-c76a-46e4-ae27-f54da2ae9fa3"}]} [ 642.016110] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] [ 642.016110] env[69367]: ERROR nova.compute.manager 
[instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] During handling of the above exception, another exception occurred: [ 642.016110] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] [ 642.016110] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] Traceback (most recent call last): [ 642.016110] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 642.016110] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] with self.rt.instance_claim(context, instance, node, allocs, [ 642.016110] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 642.016110] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] return f(*args, **kwargs) [ 642.016411] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 642.016411] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] self._update(elevated, cn) [ 642.016411] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 642.016411] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] self._update_to_placement(context, compute_node, startup) [ 642.016411] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 642.016411] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 642.016411] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 642.016411] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] return attempt.get(self._wrap_exception) [ 642.016411] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 642.016411] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] six.reraise(self.value[0], self.value[1], self.value[2]) [ 642.016411] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 642.016411] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] raise value [ 642.016411] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 642.016769] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 642.016769] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 
1390, in _update_to_placement [ 642.016769] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] self.reportclient.update_from_provider_tree( [ 642.016769] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 642.016769] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] with catch_all(pd.uuid): [ 642.016769] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 642.016769] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] self.gen.throw(typ, value, traceback) [ 642.016769] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 642.016769] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] raise exception.ResourceProviderSyncFailed() [ 642.016769] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 642.016769] env[69367]: ERROR nova.compute.manager [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] [ 642.017238] env[69367]: DEBUG nova.compute.utils [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 642.018517] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.265s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 642.020110] env[69367]: INFO nova.compute.claims [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 642.023239] env[69367]: DEBUG nova.compute.manager [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] Build of instance 83fb3858-0c21-42f1-a815-f007bcdb8561 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 642.023414] env[69367]: DEBUG nova.compute.manager [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 642.023798] env[69367]: DEBUG oslo_concurrency.lockutils [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Acquiring lock "refresh_cache-83fb3858-0c21-42f1-a815-f007bcdb8561" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.023965] env[69367]: DEBUG oslo_concurrency.lockutils [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Acquired lock "refresh_cache-83fb3858-0c21-42f1-a815-f007bcdb8561" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 642.024180] env[69367]: DEBUG nova.network.neutron [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 642.115022] env[69367]: INFO nova.compute.manager [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: db11f64c-0881-4a06-ba8d-6f52ec7fab16] Took 1.03 seconds to deallocate network for instance. [ 642.228029] env[69367]: DEBUG nova.network.neutron [req-298c00ef-3682-4b71-84f8-8b81851b5a30 req-524e1a41-e613-4984-bd95-c9647b26aaac service nova] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Updated VIF entry in instance network info cache for port 013efad5-0b57-43e9-b662-10e31d24d8af. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 642.228029] env[69367]: DEBUG nova.network.neutron [req-298c00ef-3682-4b71-84f8-8b81851b5a30 req-524e1a41-e613-4984-bd95-c9647b26aaac service nova] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Updating instance_info_cache with network_info: [{"id": "013efad5-0b57-43e9-b662-10e31d24d8af", "address": "fa:16:3e:1a:c6:8e", "network": {"id": "dd68ce65-5682-4b4c-913c-cf699d2146be", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-341319856-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2375d6603eef45069be4a3541519002a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2c019b6-3ef3-4c8f-95bd-edede2c554a9", "external-id": "nsx-vlan-transportzone-364", "segmentation_id": 364, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap013efad5-0b", "ovs_interfaceid": "013efad5-0b57-43e9-b662-10e31d24d8af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.314770] env[69367]: DEBUG nova.compute.manager [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 642.360770] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233780, 'name': Rename_Task, 'duration_secs': 0.183851} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.360770] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 642.360770] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-896d3eaa-516e-427b-a17e-5ed9f8831387 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.368304] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Waiting for the task: (returnval){ [ 642.368304] env[69367]: value = "task-4233781" [ 642.368304] env[69367]: _type = "Task" [ 642.368304] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.378946] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233781, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.564177] env[69367]: DEBUG nova.network.neutron [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 642.689229] env[69367]: DEBUG nova.network.neutron [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.732845] env[69367]: DEBUG oslo_concurrency.lockutils [req-298c00ef-3682-4b71-84f8-8b81851b5a30 req-524e1a41-e613-4984-bd95-c9647b26aaac service nova] Releasing lock "refresh_cache-e1c7d100-4ad7-4871-970f-bb7562bfc6fc" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 642.786543] env[69367]: DEBUG nova.network.neutron [-] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.843511] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 642.883208] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233781, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.059062] env[69367]: DEBUG nova.scheduler.client.report [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 643.078152] env[69367]: DEBUG nova.scheduler.client.report [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 643.078152] env[69367]: DEBUG nova.compute.provider_tree [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 643.094116] env[69367]: DEBUG nova.scheduler.client.report [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 643.113079] env[69367]: DEBUG nova.scheduler.client.report [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 643.157617] env[69367]: INFO nova.scheduler.client.report [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Deleted allocations for instance db11f64c-0881-4a06-ba8d-6f52ec7fab16 [ 643.192284] env[69367]: DEBUG oslo_concurrency.lockutils [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Releasing lock "refresh_cache-83fb3858-0c21-42f1-a815-f007bcdb8561" {{(pid=69367) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 643.192284] env[69367]: DEBUG nova.compute.manager [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 643.192284] env[69367]: DEBUG nova.compute.manager [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 643.192525] env[69367]: DEBUG nova.network.neutron [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 643.231018] env[69367]: DEBUG nova.network.neutron [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 643.290146] env[69367]: INFO nova.compute.manager [-] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Took 1.83 seconds to deallocate network for instance. [ 643.383026] env[69367]: DEBUG oslo_vmware.api [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233781, 'name': PowerOnVM_Task, 'duration_secs': 0.65996} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.383683] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 643.383942] env[69367]: DEBUG nova.compute.manager [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 643.385114] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b006553-46e4-4165-afb9-a4d0cd328c5f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.499233] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-537c1a31-e011-4906-a3fa-5c9a3ad748f6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.508033] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7370d419-2443-4a3c-873a-e0dd6363d9ec {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.539840] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e0239c-f8ca-47f5-9e79-b6ecc869ad26 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.549653] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d303296a-174f-47bb-b9a1-3f259c996035 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.566789] env[69367]: DEBUG nova.compute.provider_tree [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 643.657200] env[69367]: DEBUG nova.compute.manager [req-3da09299-65b2-4504-b58b-d1d48778270a req-35bf32c4-0bf4-49e7-b317-e8f8ce66474d service nova] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Received event network-changed-013efad5-0b57-43e9-b662-10e31d24d8af {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 643.657627] env[69367]: DEBUG nova.compute.manager [req-3da09299-65b2-4504-b58b-d1d48778270a req-35bf32c4-0bf4-49e7-b317-e8f8ce66474d service nova] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Refreshing instance network info cache due to event network-changed-013efad5-0b57-43e9-b662-10e31d24d8af. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 643.657988] env[69367]: DEBUG oslo_concurrency.lockutils [req-3da09299-65b2-4504-b58b-d1d48778270a req-35bf32c4-0bf4-49e7-b317-e8f8ce66474d service nova] Acquiring lock "refresh_cache-e1c7d100-4ad7-4871-970f-bb7562bfc6fc" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.658294] env[69367]: DEBUG oslo_concurrency.lockutils [req-3da09299-65b2-4504-b58b-d1d48778270a req-35bf32c4-0bf4-49e7-b317-e8f8ce66474d service nova] Acquired lock "refresh_cache-e1c7d100-4ad7-4871-970f-bb7562bfc6fc" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 643.658945] env[69367]: DEBUG nova.network.neutron [req-3da09299-65b2-4504-b58b-d1d48778270a req-35bf32c4-0bf4-49e7-b317-e8f8ce66474d service nova] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Refreshing network info cache for port 013efad5-0b57-43e9-b662-10e31d24d8af {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 643.670463] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2aa8ff71-f61c-43db-af35-540a770a2ca5 tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Lock "db11f64c-0881-4a06-ba8d-6f52ec7fab16" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.821s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 643.733055] env[69367]: DEBUG nova.network.neutron [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.799131] env[69367]: DEBUG oslo_concurrency.lockutils [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 643.918762] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 644.099917] env[69367]: ERROR nova.scheduler.client.report [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [req-2f8d0b65-b65a-4ed7-a827-557d5693b37b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2f8d0b65-b65a-4ed7-a827-557d5693b37b"}]} [ 644.100581] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.082s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 644.101502] env[69367]: ERROR nova.compute.manager [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 644.101502] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] Traceback (most recent call last): [ 644.101502] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 644.101502] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] yield [ 644.101502] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 644.101502] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] self.set_inventory_for_provider( [ 644.101502] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 644.101502] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 644.101844] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2f8d0b65-b65a-4ed7-a827-557d5693b37b"}]} [ 644.101844] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] [ 644.101844] env[69367]: ERROR nova.compute.manager 
[instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] During handling of the above exception, another exception occurred: [ 644.101844] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] [ 644.101844] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] Traceback (most recent call last): [ 644.101844] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 644.101844] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] with self.rt.instance_claim(context, instance, node, allocs, [ 644.101844] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 644.101844] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] return f(*args, **kwargs) [ 644.102178] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 644.102178] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] self._update(elevated, cn) [ 644.102178] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 644.102178] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] self._update_to_placement(context, compute_node, startup) [ 644.102178] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 644.102178] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 644.102178] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 644.102178] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] return attempt.get(self._wrap_exception) [ 644.102178] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 644.102178] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] six.reraise(self.value[0], self.value[1], self.value[2]) [ 644.102178] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 644.102178] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] raise value [ 644.102178] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 644.102546] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 644.102546] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 
1390, in _update_to_placement [ 644.102546] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] self.reportclient.update_from_provider_tree( [ 644.102546] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 644.102546] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] with catch_all(pd.uuid): [ 644.102546] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 644.102546] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] self.gen.throw(typ, value, traceback) [ 644.102546] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 644.102546] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] raise exception.ResourceProviderSyncFailed() [ 644.102546] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 644.102546] env[69367]: ERROR nova.compute.manager [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] [ 644.108063] env[69367]: DEBUG nova.compute.utils [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 644.108063] env[69367]: DEBUG oslo_concurrency.lockutils [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.661s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 644.108257] env[69367]: INFO nova.compute.claims [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 644.111361] env[69367]: DEBUG nova.compute.manager [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] Build of instance 5994e782-02fc-47a9-81f8-aa4b6d9fec4b was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 644.111766] env[69367]: DEBUG nova.compute.manager [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 644.111766] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Acquiring lock "refresh_cache-5994e782-02fc-47a9-81f8-aa4b6d9fec4b" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.116024] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Acquired lock "refresh_cache-5994e782-02fc-47a9-81f8-aa4b6d9fec4b" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 644.116024] env[69367]: DEBUG nova.network.neutron [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 644.178451] env[69367]: DEBUG nova.compute.manager [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 644.236538] env[69367]: INFO nova.compute.manager [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 83fb3858-0c21-42f1-a815-f007bcdb8561] Took 1.04 seconds to deallocate network for instance. [ 644.589065] env[69367]: DEBUG nova.compute.manager [req-51c6bcac-fc3e-4de2-8012-6499ed512968 req-3b1061b4-5b24-466f-82d7-d2e106ee7749 service nova] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Received event network-changed-b6a0688d-a5a2-4937-9ac7-25b53f9b001d {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 644.589668] env[69367]: DEBUG nova.compute.manager [req-51c6bcac-fc3e-4de2-8012-6499ed512968 req-3b1061b4-5b24-466f-82d7-d2e106ee7749 service nova] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Refreshing instance network info cache due to event network-changed-b6a0688d-a5a2-4937-9ac7-25b53f9b001d. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 644.590222] env[69367]: DEBUG oslo_concurrency.lockutils [req-51c6bcac-fc3e-4de2-8012-6499ed512968 req-3b1061b4-5b24-466f-82d7-d2e106ee7749 service nova] Acquiring lock "refresh_cache-3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.590222] env[69367]: DEBUG oslo_concurrency.lockutils [req-51c6bcac-fc3e-4de2-8012-6499ed512968 req-3b1061b4-5b24-466f-82d7-d2e106ee7749 service nova] Acquired lock "refresh_cache-3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 644.590363] env[69367]: DEBUG nova.network.neutron [req-51c6bcac-fc3e-4de2-8012-6499ed512968 req-3b1061b4-5b24-466f-82d7-d2e106ee7749 service nova] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Refreshing network info cache for port b6a0688d-a5a2-4937-9ac7-25b53f9b001d {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 644.673812] env[69367]: DEBUG nova.network.neutron [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 644.717124] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 644.902606] env[69367]: DEBUG nova.network.neutron [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.024953] env[69367]: DEBUG nova.network.neutron [req-3da09299-65b2-4504-b58b-d1d48778270a req-35bf32c4-0bf4-49e7-b317-e8f8ce66474d service nova] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Updated VIF entry in instance network info cache for port 013efad5-0b57-43e9-b662-10e31d24d8af. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 645.026734] env[69367]: DEBUG nova.network.neutron [req-3da09299-65b2-4504-b58b-d1d48778270a req-35bf32c4-0bf4-49e7-b317-e8f8ce66474d service nova] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Updating instance_info_cache with network_info: [{"id": "013efad5-0b57-43e9-b662-10e31d24d8af", "address": "fa:16:3e:1a:c6:8e", "network": {"id": "dd68ce65-5682-4b4c-913c-cf699d2146be", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-341319856-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2375d6603eef45069be4a3541519002a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2c019b6-3ef3-4c8f-95bd-edede2c554a9", "external-id": "nsx-vlan-transportzone-364", "segmentation_id": 364, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap013efad5-0b", "ovs_interfaceid": "013efad5-0b57-43e9-b662-10e31d24d8af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.154324] env[69367]: DEBUG nova.scheduler.client.report [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 645.184885] env[69367]: DEBUG nova.scheduler.client.report [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 645.185327] env[69367]: DEBUG nova.compute.provider_tree [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} 
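The repeated 400 responses in this log (req-281dd924, req-2f8d0b65, req-c9a6220b) all come from the same payload: the compute host reports DISK_GB inventory with max_unit 0, and Placement's inventory schema requires max_unit to be an integer of at least 1. Each rejection surfaces as ResourceProviderUpdateFailed, which update_from_provider_tree turns into ResourceProviderSyncFailed, and the affected builds (83fb3858, 5994e782, 4bef75af) are re-scheduled. A minimal sketch of the failing check follows; the schema fragment and the value 0 are taken verbatim from the error detail above, while the use of the jsonschema library is purely illustrative and is not Placement's actual code path.

    # Sketch only: reproduce the validation failure quoted in the 400 responses.
    import jsonschema

    # Schema fragment from the error detail: max_unit must be in [1, 2**31 - 1].
    max_unit_schema = {"type": "integer", "minimum": 1, "maximum": 2147483647}

    # DISK_GB inventory as reported by the compute host in this log.
    disk_gb = {"total": 400, "reserved": 0, "min_unit": 1,
               "max_unit": 0, "step_size": 1, "allocation_ratio": 1.0}

    try:
        jsonschema.validate(disk_gb["max_unit"], max_unit_schema)
    except jsonschema.ValidationError as exc:
        # Prints "0 is less than the minimum of 1", the same message Placement
        # returns, after which Nova raises ResourceProviderSyncFailed and
        # re-schedules the build.
        print(exc.message)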
[ 645.206380] env[69367]: DEBUG nova.scheduler.client.report [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 645.233111] env[69367]: DEBUG nova.scheduler.client.report [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 645.291105] env[69367]: INFO nova.scheduler.client.report [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Deleted allocations for instance 83fb3858-0c21-42f1-a815-f007bcdb8561 [ 645.367909] env[69367]: DEBUG oslo_concurrency.lockutils [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] Acquiring lock "c7bc6ebd-d7fd-439a-829f-8f4bf2065623" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 645.368229] env[69367]: DEBUG oslo_concurrency.lockutils [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] Lock "c7bc6ebd-d7fd-439a-829f-8f4bf2065623" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 645.405066] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Releasing lock "refresh_cache-5994e782-02fc-47a9-81f8-aa4b6d9fec4b" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 645.405353] env[69367]: DEBUG nova.compute.manager [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 645.405563] env[69367]: DEBUG nova.compute.manager [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 645.405720] env[69367]: DEBUG nova.network.neutron [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 645.452690] env[69367]: DEBUG nova.network.neutron [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 645.531641] env[69367]: DEBUG oslo_concurrency.lockutils [req-3da09299-65b2-4504-b58b-d1d48778270a req-35bf32c4-0bf4-49e7-b317-e8f8ce66474d service nova] Releasing lock "refresh_cache-e1c7d100-4ad7-4871-970f-bb7562bfc6fc" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 645.657153] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-112088d7-3424-44be-a46f-46e27a1b03a7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.666155] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27fc4178-3ef3-47e3-bdcb-61c04da2ff64 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.707236] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-110a70d7-629e-450d-a67c-ad061655b738 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.717252] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef957802-861d-4387-bcf7-bcf01639ae9c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.735303] env[69367]: DEBUG nova.compute.provider_tree [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 645.800360] env[69367]: DEBUG oslo_concurrency.lockutils [None req-05f9ed91-7693-4818-a6f4-6a76f2db0941 tempest-ListImageFiltersTestJSON-127647720 
tempest-ListImageFiltersTestJSON-127647720-project-member] Lock "83fb3858-0c21-42f1-a815-f007bcdb8561" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.508s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 645.957159] env[69367]: DEBUG nova.network.neutron [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.093960] env[69367]: DEBUG nova.network.neutron [req-51c6bcac-fc3e-4de2-8012-6499ed512968 req-3b1061b4-5b24-466f-82d7-d2e106ee7749 service nova] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Updated VIF entry in instance network info cache for port b6a0688d-a5a2-4937-9ac7-25b53f9b001d. {{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 646.094833] env[69367]: DEBUG nova.network.neutron [req-51c6bcac-fc3e-4de2-8012-6499ed512968 req-3b1061b4-5b24-466f-82d7-d2e106ee7749 service nova] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Updating instance_info_cache with network_info: [{"id": "b6a0688d-a5a2-4937-9ac7-25b53f9b001d", "address": "fa:16:3e:a7:29:36", "network": {"id": "346c9326-4d8c-4f7c-b346-ea12f5dd891e", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1718591398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "06c46b0af1af4a788c5e7159fc2daa3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6a0688d-a5", "ovs_interfaceid": "b6a0688d-a5a2-4937-9ac7-25b53f9b001d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.265294] env[69367]: ERROR nova.scheduler.client.report [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] [req-c9a6220b-dadf-4fd9-b2b5-2540f6caeb3d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-c9a6220b-dadf-4fd9-b2b5-2540f6caeb3d"}]} [ 646.265791] env[69367]: DEBUG oslo_concurrency.lockutils [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.159s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 646.266493] env[69367]: ERROR nova.compute.manager [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 646.266493] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] Traceback (most recent call last): [ 646.266493] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 646.266493] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] yield [ 646.266493] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 646.266493] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] self.set_inventory_for_provider( [ 646.266493] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 646.266493] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 646.266795] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-c9a6220b-dadf-4fd9-b2b5-2540f6caeb3d"}]} [ 646.266795] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] [ 646.266795] env[69367]: ERROR 
nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] During handling of the above exception, another exception occurred: [ 646.266795] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] [ 646.266795] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] Traceback (most recent call last): [ 646.266795] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 646.266795] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] with self.rt.instance_claim(context, instance, node, allocs, [ 646.266795] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 646.266795] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] return f(*args, **kwargs) [ 646.267119] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 646.267119] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] self._update(elevated, cn) [ 646.267119] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 646.267119] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] self._update_to_placement(context, compute_node, startup) [ 646.267119] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 646.267119] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 646.267119] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 646.267119] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] return attempt.get(self._wrap_exception) [ 646.267119] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 646.267119] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] six.reraise(self.value[0], self.value[1], self.value[2]) [ 646.267119] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 646.267119] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] raise value [ 646.267119] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 646.267514] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 646.267514] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] File 
"/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 646.267514] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] self.reportclient.update_from_provider_tree( [ 646.267514] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 646.267514] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] with catch_all(pd.uuid): [ 646.267514] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 646.267514] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] self.gen.throw(typ, value, traceback) [ 646.267514] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 646.267514] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] raise exception.ResourceProviderSyncFailed() [ 646.267514] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 646.267514] env[69367]: ERROR nova.compute.manager [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] [ 646.267960] env[69367]: DEBUG nova.compute.utils [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 646.271775] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.968s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 646.271775] env[69367]: DEBUG nova.objects.instance [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Lazy-loading 'resources' on Instance uuid 5c7b2127-e875-4222-8148-a2ea60631c25 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 646.273374] env[69367]: DEBUG nova.compute.manager [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] Build of instance 4bef75af-bbe8-4c6e-8c06-9c827ece1134 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 646.273374] env[69367]: DEBUG nova.compute.manager [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 646.273374] env[69367]: DEBUG oslo_concurrency.lockutils [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] Acquiring lock "refresh_cache-4bef75af-bbe8-4c6e-8c06-9c827ece1134" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.273374] env[69367]: DEBUG oslo_concurrency.lockutils [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] Acquired lock "refresh_cache-4bef75af-bbe8-4c6e-8c06-9c827ece1134" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 646.273638] env[69367]: DEBUG nova.network.neutron [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 646.303861] env[69367]: DEBUG nova.compute.manager [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 646.463678] env[69367]: INFO nova.compute.manager [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] [instance: 5994e782-02fc-47a9-81f8-aa4b6d9fec4b] Took 1.06 seconds to deallocate network for instance. 
[ 646.599379] env[69367]: DEBUG oslo_concurrency.lockutils [req-51c6bcac-fc3e-4de2-8012-6499ed512968 req-3b1061b4-5b24-466f-82d7-d2e106ee7749 service nova] Releasing lock "refresh_cache-3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 646.810091] env[69367]: DEBUG nova.scheduler.client.report [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 646.829903] env[69367]: DEBUG nova.scheduler.client.report [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 646.831585] env[69367]: DEBUG nova.compute.provider_tree [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 646.835321] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 646.840566] env[69367]: DEBUG nova.network.neutron [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 646.843376] env[69367]: DEBUG nova.scheduler.client.report [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 646.876614] env[69367]: DEBUG nova.scheduler.client.report [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 647.097676] env[69367]: DEBUG nova.network.neutron [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.337625] env[69367]: DEBUG nova.compute.manager [req-2f141050-a8aa-4a0d-bce7-ebdead312549 req-7107b22e-2bfe-4974-b757-ec7eea1ce01d service nova] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Received event network-vif-deleted-a7fe18e3-9f20-481e-b223-1b2907709041 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 647.348012] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fb6ed14-7957-41a9-a661-366383ee3a42 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.360302] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f361eed8-c13c-40b4-9741-b2e73c9da16f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.415449] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb0e7bc-3d84-42f2-b647-c98867a50800 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.423516] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-301b0935-c872-443b-98ef-617abbd4aa55 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.445853] env[69367]: DEBUG nova.compute.provider_tree [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 647.512164] env[69367]: INFO nova.scheduler.client.report [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Deleted allocations for instance 5994e782-02fc-47a9-81f8-aa4b6d9fec4b [ 647.605978] env[69367]: DEBUG oslo_concurrency.lockutils [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] Releasing lock "refresh_cache-4bef75af-bbe8-4c6e-8c06-9c827ece1134" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 647.606260] env[69367]: DEBUG nova.compute.manager [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 647.606449] env[69367]: DEBUG nova.compute.manager [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 647.606619] env[69367]: DEBUG nova.network.neutron [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 647.644511] env[69367]: DEBUG nova.network.neutron [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 647.980150] env[69367]: ERROR nova.scheduler.client.report [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] [req-2e4857cc-da57-4430-afc3-2d8fb2610406] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2e4857cc-da57-4430-afc3-2d8fb2610406"}]} [ 647.980615] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.711s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 647.982535] env[69367]: ERROR nova.compute.manager [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 647.982535] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Traceback (most recent call last): [ 647.982535] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 647.982535] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] yield [ 647.982535] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 647.982535] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] self.set_inventory_for_provider( [ 647.982535] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 647.982535] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 647.982880] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2e4857cc-da57-4430-afc3-2d8fb2610406"}]} [ 647.982880] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] [ 647.982880] env[69367]: ERROR nova.compute.manager 
[instance: 5c7b2127-e875-4222-8148-a2ea60631c25] During handling of the above exception, another exception occurred: [ 647.982880] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] [ 647.982880] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Traceback (most recent call last): [ 647.982880] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 647.982880] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] self._delete_instance(context, instance, bdms) [ 647.982880] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 647.982880] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] self._complete_deletion(context, instance) [ 647.983184] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 647.983184] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] self._update_resource_tracker(context, instance) [ 647.983184] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 647.983184] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] self.rt.update_usage(context, instance, instance.node) [ 647.983184] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 647.983184] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] return f(*args, **kwargs) [ 647.983184] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 647.983184] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] self._update(context.elevated(), self.compute_nodes[nodename]) [ 647.983184] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 647.983184] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] self._update_to_placement(context, compute_node, startup) [ 647.983184] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 647.983184] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 647.983527] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 647.983527] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] return attempt.get(self._wrap_exception) [ 647.983527] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 647.983527] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] six.reraise(self.value[0], self.value[1], self.value[2]) [ 647.983527] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 647.983527] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] raise value [ 647.983527] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 647.983527] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 647.983527] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 647.983527] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] self.reportclient.update_from_provider_tree( [ 647.983527] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 647.983527] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] with catch_all(pd.uuid): [ 647.983527] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 647.983894] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] self.gen.throw(typ, value, traceback) [ 647.983894] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 647.983894] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] raise exception.ResourceProviderSyncFailed() [ 647.983894] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 647.983894] env[69367]: ERROR nova.compute.manager [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] [ 647.987763] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.416s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 647.987763] env[69367]: DEBUG nova.objects.instance [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Lazy-loading 'resources' on Instance uuid a358ce6d-9826-4ddb-8c2f-51bac8db59d4 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 648.010923] env[69367]: INFO nova.compute.manager [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Rebuilding instance [ 648.023896] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e1cc102c-7874-4dd6-88e4-3f9c2c2d9197 tempest-ListImageFiltersTestJSON-127647720 tempest-ListImageFiltersTestJSON-127647720-project-member] Lock "5994e782-02fc-47a9-81f8-aa4b6d9fec4b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.839s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 648.085345] env[69367]: DEBUG nova.compute.manager [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 648.086309] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ac3baf-b140-4857-9360-51cc2257e860 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.148242] env[69367]: DEBUG nova.network.neutron [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.490213] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Lock "5c7b2127-e875-4222-8148-a2ea60631c25" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.144s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 648.515675] env[69367]: DEBUG nova.scheduler.client.report [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 648.530019] env[69367]: DEBUG nova.compute.manager [None 
req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 648.538344] env[69367]: DEBUG nova.scheduler.client.report [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 648.538344] env[69367]: DEBUG nova.compute.provider_tree [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 648.554249] env[69367]: DEBUG nova.scheduler.client.report [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 648.591749] env[69367]: DEBUG nova.scheduler.client.report [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 648.654579] env[69367]: INFO nova.compute.manager [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] [instance: 4bef75af-bbe8-4c6e-8c06-9c827ece1134] Took 1.05 seconds to deallocate network for instance. 
[ 648.974084] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-877269e1-1784-425b-9114-47683371262e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.983569] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501092d6-c0b3-495e-83b2-10d71e16a957 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.022515] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63ace44-5d7b-4588-a090-ec6cb7505ff9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.032653] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af1e7612-be9b-4050-9d2e-d8b4629a26bf {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.053618] env[69367]: DEBUG nova.compute.provider_tree [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 649.061454] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 649.108169] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 649.108169] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6b146f2d-9ef5-45f4-aaea-80b377718235 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.116134] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Waiting for the task: (returnval){ [ 649.116134] env[69367]: value = "task-4233783" [ 649.116134] env[69367]: _type = "Task" [ 649.116134] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.128136] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Task: {'id': task-4233783, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.591675] env[69367]: ERROR nova.scheduler.client.report [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [req-34cb8e2e-98a5-45f1-901f-90576656f5c2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-34cb8e2e-98a5-45f1-901f-90576656f5c2"}]} [ 649.592159] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.606s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 649.592866] env[69367]: ERROR nova.compute.manager [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 649.592866] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Traceback (most recent call last): [ 649.592866] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 649.592866] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] yield [ 649.592866] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 649.592866] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] self.set_inventory_for_provider( [ 649.592866] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 649.592866] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 649.593144] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-34cb8e2e-98a5-45f1-901f-90576656f5c2"}]} [ 649.593144] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] [ 649.593144] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] During handling of the above exception, another exception occurred: [ 649.593144] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] [ 649.593144] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Traceback (most recent call last): [ 649.593144] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 649.593144] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] self._delete_instance(context, instance, bdms) [ 649.593144] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 649.593144] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] self._complete_deletion(context, instance) [ 649.593469] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 649.593469] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] self._update_resource_tracker(context, instance) [ 649.593469] env[69367]: ERROR nova.compute.manager [instance: 
a358ce6d-9826-4ddb-8c2f-51bac8db59d4] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 649.593469] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] self.rt.update_usage(context, instance, instance.node) [ 649.593469] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 649.593469] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] return f(*args, **kwargs) [ 649.593469] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 649.593469] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] self._update(context.elevated(), self.compute_nodes[nodename]) [ 649.593469] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 649.593469] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] self._update_to_placement(context, compute_node, startup) [ 649.593469] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 649.593469] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 649.593854] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 649.593854] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] return attempt.get(self._wrap_exception) [ 649.593854] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 649.593854] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] six.reraise(self.value[0], self.value[1], self.value[2]) [ 649.593854] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 649.593854] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] raise value [ 649.593854] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 649.593854] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 649.593854] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 649.593854] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] self.reportclient.update_from_provider_tree( [ 649.593854] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 649.593854] 
env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] with catch_all(pd.uuid): [ 649.593854] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 649.595826] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] self.gen.throw(typ, value, traceback) [ 649.595826] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 649.595826] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] raise exception.ResourceProviderSyncFailed() [ 649.595826] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 649.595826] env[69367]: ERROR nova.compute.manager [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] [ 649.598548] env[69367]: DEBUG oslo_concurrency.lockutils [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.348s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 649.600646] env[69367]: INFO nova.compute.claims [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 649.634919] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Task: {'id': task-4233783, 'name': PowerOffVM_Task, 'duration_secs': 0.153979} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.634919] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 649.634919] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 649.635278] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee1ca29-6b1f-4acc-82ae-6851e7b283c0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.645050] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 649.645050] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cdce5cbe-0f78-4ff6-a346-69c6a941b94a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.674029] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 649.674029] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Deleting contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 649.674029] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Deleting the datastore file [datastore1] 4e346ed1-36e9-421d-975f-e8bb6f05c0a0 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 649.674029] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d031ea06-4bf4-430a-8394-83f05433feee {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.683352] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Waiting for the task: (returnval){ [ 649.683352] env[69367]: value = "task-4233785" [ 649.683352] env[69367]: _type = "Task" [ 649.683352] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.692213] env[69367]: INFO nova.scheduler.client.report [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] Deleted allocations for instance 4bef75af-bbe8-4c6e-8c06-9c827ece1134 [ 649.707737] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Task: {'id': task-4233785, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.035627] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 650.107929] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Lock "a358ce6d-9826-4ddb-8c2f-51bac8db59d4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.877s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 650.181742] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Acquiring lock "5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 650.181742] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Lock "5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 650.194372] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Task: {'id': task-4233785, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14288} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.194715] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 650.194950] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Deleted contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 650.195152] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 650.210388] env[69367]: DEBUG oslo_concurrency.lockutils [None req-37f23c14-fa8a-4512-b66e-872b851a765a tempest-ImagesOneServerNegativeTestJSON-435923336 tempest-ImagesOneServerNegativeTestJSON-435923336-project-member] Lock "4bef75af-bbe8-4c6e-8c06-9c827ece1134" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.372s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 650.637022] env[69367]: DEBUG nova.scheduler.client.report [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 650.653596] env[69367]: DEBUG nova.scheduler.client.report [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 650.653844] env[69367]: DEBUG nova.compute.provider_tree [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 650.667998] 
env[69367]: DEBUG nova.scheduler.client.report [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 650.689418] env[69367]: DEBUG nova.scheduler.client.report [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 650.714978] env[69367]: DEBUG nova.compute.manager [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 651.060041] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b43eb317-1a23-46ae-b421-2db3fef02898 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.074277] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e813f50-b0c1-4336-be07-07795337e592 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.117185] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85bdc369-3451-4b8a-88fd-45c1d309997a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.127826] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e96e44-2d50-4a91-a748-adaa40a17be8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.150198] env[69367]: DEBUG nova.compute.provider_tree [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 651.244631] env[69367]: DEBUG oslo_concurrency.lockutils [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.247996] env[69367]: DEBUG nova.virt.hardware [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Getting desirable topologies for flavor 
Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 651.247996] env[69367]: DEBUG nova.virt.hardware [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 651.247996] env[69367]: DEBUG nova.virt.hardware [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 651.247996] env[69367]: DEBUG nova.virt.hardware [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 651.248266] env[69367]: DEBUG nova.virt.hardware [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 651.248586] env[69367]: DEBUG nova.virt.hardware [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 651.248877] env[69367]: DEBUG nova.virt.hardware [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 651.249113] env[69367]: DEBUG nova.virt.hardware [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 651.249350] env[69367]: DEBUG nova.virt.hardware [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 651.249527] env[69367]: DEBUG nova.virt.hardware [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 
tempest-ServersAdmin275Test-153682517-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 651.249727] env[69367]: DEBUG nova.virt.hardware [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 651.250899] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57be5a68-33ff-4d32-86fb-254926b372c1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.264871] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc79cea6-e205-496f-8c12-ac2b6b3ede35 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.284353] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Instance VIF info [] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 651.290517] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 651.290831] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 651.291118] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f0fd830-45ee-4075-a172-e1d8fde72a9f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.314714] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 651.314714] env[69367]: value = "task-4233786" [ 651.314714] env[69367]: _type = "Task" [ 651.314714] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.325652] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233786, 'name': CreateVM_Task} progress is 5%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.628390] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.656553] env[69367]: DEBUG nova.scheduler.client.report [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 651.828562] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233786, 'name': CreateVM_Task, 'duration_secs': 0.340055} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.828963] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 651.833353] env[69367]: DEBUG oslo_concurrency.lockutils [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.833353] env[69367]: DEBUG oslo_concurrency.lockutils [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 651.833353] env[69367]: DEBUG oslo_concurrency.lockutils [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 651.833353] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7c728c1-6098-4f3b-a338-c7e88b439448 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.837926] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Waiting for the task: (returnval){ [ 651.837926] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]5208dcd8-c54a-c8e1-c68e-7700a8784730" [ 
651.837926] env[69367]: _type = "Task" [ 651.837926] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.851830] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5208dcd8-c54a-c8e1-c68e-7700a8784730, 'name': SearchDatastore_Task} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.851830] env[69367]: DEBUG oslo_concurrency.lockutils [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 651.851830] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 651.852118] env[69367]: DEBUG oslo_concurrency.lockutils [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.852740] env[69367]: DEBUG oslo_concurrency.lockutils [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 651.853421] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 651.853421] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-912bc59e-84c7-430c-86e3-a5816a2b8843 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.863953] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 651.864247] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 651.865319] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c85d09c7-442d-4f6a-8336-1f62322415fa {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.872036] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Waiting for the task: (returnval){ [ 651.872036] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]5254f9b5-41c8-c0bc-56ff-ff5587a42c99" [ 651.872036] env[69367]: _type = "Task" [ 651.872036] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.882950] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5254f9b5-41c8-c0bc-56ff-ff5587a42c99, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.167192] env[69367]: DEBUG oslo_concurrency.lockutils [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.569s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 652.167192] env[69367]: DEBUG nova.compute.manager [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Start building networks asynchronously for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 652.169982] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.245s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 652.171763] env[69367]: INFO nova.compute.claims [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 652.388486] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5254f9b5-41c8-c0bc-56ff-ff5587a42c99, 'name': SearchDatastore_Task, 'duration_secs': 0.010179} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.389302] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd067dde-5a2f-4b3b-bea8-bb8fe500889a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.397047] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Waiting for the task: (returnval){ [ 652.397047] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]521cd6b3-cbee-30d6-145a-4718dd249dc6" [ 652.397047] env[69367]: _type = "Task" [ 652.397047] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.410614] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]521cd6b3-cbee-30d6-145a-4718dd249dc6, 'name': SearchDatastore_Task} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.411623] env[69367]: DEBUG oslo_concurrency.lockutils [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 652.412472] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] 4e346ed1-36e9-421d-975f-e8bb6f05c0a0/4e346ed1-36e9-421d-975f-e8bb6f05c0a0.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 652.413245] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-768568a9-58d5-4bbb-acb6-aa9d8663eab9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.423171] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Waiting for the task: (returnval){ [ 652.423171] env[69367]: value = "task-4233787" [ 652.423171] env[69367]: _type = "Task" [ 652.423171] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.432252] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Task: {'id': task-4233787, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.680147] env[69367]: DEBUG nova.compute.utils [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 652.686608] env[69367]: DEBUG nova.compute.manager [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Allocating IP information in the background. {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 652.686608] env[69367]: DEBUG nova.network.neutron [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 652.830861] env[69367]: DEBUG nova.policy [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cd6690f13e33403c982f7ea1d4ead519', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '26a89ab4163e4b9a801dcbf11c953cf3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 652.943131] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Task: {'id': task-4233787, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.134780] env[69367]: DEBUG oslo_concurrency.lockutils [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Acquiring lock "27267edf-97f5-4238-8d9a-c2ddf0bb252c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.135324] env[69367]: DEBUG oslo_concurrency.lockutils [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Lock "27267edf-97f5-4238-8d9a-c2ddf0bb252c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.186036] env[69367]: DEBUG nova.compute.manager [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Start building block device mappings for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 653.442566] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Task: {'id': task-4233787, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.550776} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.442566] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] 4e346ed1-36e9-421d-975f-e8bb6f05c0a0/4e346ed1-36e9-421d-975f-e8bb6f05c0a0.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 653.442566] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 653.442566] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5c885877-df68-4d4c-b898-600914e1edcc {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.450162] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Waiting for the task: (returnval){ [ 653.450162] env[69367]: value = "task-4233788" [ 653.450162] env[69367]: _type = "Task" [ 653.450162] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.463245] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Task: {'id': task-4233788, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.579657] env[69367]: DEBUG nova.network.neutron [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Successfully created port: fa738fa1-0be4-4506-8e42-73671661dee1 {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 653.594409] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4599fb99-f67f-44a5-abc1-7eda0095a31c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.604343] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4461150-9657-460a-b542-2f78d21a210d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.645605] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f985873-c5d7-40ae-8a99-9c9dc2418895 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.654534] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b7b213-8b9a-4bf9-9b43-9001c7cdb728 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.670686] env[69367]: DEBUG nova.compute.provider_tree [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 653.964652] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Task: {'id': task-4233788, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071927} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.964821] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 653.967116] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e36d20-7e82-4276-9e8b-7652b66ce525 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.994856] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] 4e346ed1-36e9-421d-975f-e8bb6f05c0a0/4e346ed1-36e9-421d-975f-e8bb6f05c0a0.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 653.994856] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17335e4b-7994-42ad-ae79-22347c606fe9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.015389] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Waiting for the task: (returnval){ [ 654.015389] env[69367]: value = "task-4233789" [ 654.015389] env[69367]: _type = "Task" [ 654.015389] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.026130] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Task: {'id': task-4233789, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.174050] env[69367]: DEBUG nova.scheduler.client.report [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 654.203633] env[69367]: DEBUG nova.compute.manager [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Start spawning the instance on the hypervisor. 
{{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 654.254971] env[69367]: DEBUG nova.virt.hardware [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 654.255253] env[69367]: DEBUG nova.virt.hardware [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 654.255392] env[69367]: DEBUG nova.virt.hardware [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 654.255575] env[69367]: DEBUG nova.virt.hardware [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 654.256161] env[69367]: DEBUG nova.virt.hardware [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 654.256161] env[69367]: DEBUG nova.virt.hardware [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 654.260299] env[69367]: DEBUG nova.virt.hardware [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 654.260299] env[69367]: DEBUG nova.virt.hardware [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
654.260299] env[69367]: DEBUG nova.virt.hardware [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 654.260299] env[69367]: DEBUG nova.virt.hardware [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 654.260299] env[69367]: DEBUG nova.virt.hardware [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 654.260760] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17fa4294-0af6-4dc8-ba64-e13fff14716f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.273562] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0472375-cc81-455b-bdf1-84035712cbf7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.534020] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Task: {'id': task-4233789, 'name': ReconfigVM_Task, 'duration_secs': 0.32804} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.534020] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Reconfigured VM instance instance-00000005 to attach disk [datastore1] 4e346ed1-36e9-421d-975f-e8bb6f05c0a0/4e346ed1-36e9-421d-975f-e8bb6f05c0a0.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 654.534020] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bf476366-e615-43c6-a25b-eb24ac8eecaf {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.541883] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Waiting for the task: (returnval){ [ 654.541883] env[69367]: value = "task-4233790" [ 654.541883] env[69367]: _type = "Task" [ 654.541883] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.555340] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Task: {'id': task-4233790, 'name': Rename_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.682317] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.513s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 654.683025] env[69367]: DEBUG nova.compute.manager [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Start building networks asynchronously for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 654.690201] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.078s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 654.691725] env[69367]: INFO nova.compute.claims [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 655.060622] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Task: {'id': task-4233790, 'name': Rename_Task, 'duration_secs': 0.225512} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.060993] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 655.061293] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a5ceebd5-3496-41ec-a65f-e4dbf233ed42 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.074500] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Waiting for the task: (returnval){ [ 655.074500] env[69367]: value = "task-4233791" [ 655.074500] env[69367]: _type = "Task" [ 655.074500] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.084600] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Task: {'id': task-4233791, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.197068] env[69367]: DEBUG nova.compute.utils [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 655.198477] env[69367]: DEBUG nova.compute.manager [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Allocating IP information in the background. {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 655.198776] env[69367]: DEBUG nova.network.neutron [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 655.589938] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Task: {'id': task-4233791, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.594730] env[69367]: DEBUG nova.policy [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '82eb3de090e94f019532210e1a5ed361', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c210e789b141449ba9a29ab8bbc39746', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 655.705726] env[69367]: DEBUG nova.compute.manager [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Start building block device mappings for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 656.050395] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec6de82-6ab8-46a9-8b8d-63f00cb7877f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.061108] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-930c5575-5f57-481c-a821-bf10a872d48f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.102792] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd896ae2-1420-4bb0-9f73-1c49839043f2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.112583] env[69367]: DEBUG oslo_vmware.api [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Task: {'id': task-4233791, 'name': PowerOnVM_Task, 'duration_secs': 0.617828} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.115573] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 656.115856] env[69367]: DEBUG nova.compute.manager [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 656.117014] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a8d285-0d1f-4e35-8601-58a46bfc0a3b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.121041] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f88ed0c8-8b49-416e-a658-f10762f7e6cf {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.142444] env[69367]: DEBUG nova.compute.provider_tree [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 656.653252] env[69367]: DEBUG nova.scheduler.client.report [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 656.658379] env[69367]: DEBUG oslo_concurrency.lockutils [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.721195] env[69367]: DEBUG nova.compute.manager [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Start spawning the instance on the hypervisor. {{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 656.758893] env[69367]: DEBUG nova.virt.hardware [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 656.759160] env[69367]: DEBUG nova.virt.hardware [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 656.759316] env[69367]: DEBUG nova.virt.hardware [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 656.759493] env[69367]: DEBUG nova.virt.hardware [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 656.759638] env[69367]: DEBUG nova.virt.hardware [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 656.759797] env[69367]: DEBUG nova.virt.hardware [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 656.759995] env[69367]: DEBUG nova.virt.hardware 
[None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 656.760310] env[69367]: DEBUG nova.virt.hardware [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 656.760576] env[69367]: DEBUG nova.virt.hardware [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 656.760793] env[69367]: DEBUG nova.virt.hardware [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 656.761055] env[69367]: DEBUG nova.virt.hardware [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 656.762060] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaabb2e9-8638-4afa-8e2a-f58b1a9649f0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.771745] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b116f9d-3b40-470a-8979-6d28d9531774 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.931384] env[69367]: DEBUG nova.compute.manager [req-57ae6dc4-6f74-4022-8004-ede6e54b5706 req-6a5c8a46-100d-453a-ad83-fabb32eda594 service nova] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Received event network-vif-plugged-fa738fa1-0be4-4506-8e42-73671661dee1 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 656.932516] env[69367]: DEBUG oslo_concurrency.lockutils [req-57ae6dc4-6f74-4022-8004-ede6e54b5706 req-6a5c8a46-100d-453a-ad83-fabb32eda594 service nova] Acquiring lock "8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.932516] env[69367]: DEBUG oslo_concurrency.lockutils [req-57ae6dc4-6f74-4022-8004-ede6e54b5706 req-6a5c8a46-100d-453a-ad83-fabb32eda594 service nova] Lock "8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.932516] env[69367]: DEBUG oslo_concurrency.lockutils [req-57ae6dc4-6f74-4022-8004-ede6e54b5706 req-6a5c8a46-100d-453a-ad83-fabb32eda594 service nova] Lock 
"8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 656.932516] env[69367]: DEBUG nova.compute.manager [req-57ae6dc4-6f74-4022-8004-ede6e54b5706 req-6a5c8a46-100d-453a-ad83-fabb32eda594 service nova] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] No waiting events found dispatching network-vif-plugged-fa738fa1-0be4-4506-8e42-73671661dee1 {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 656.932882] env[69367]: WARNING nova.compute.manager [req-57ae6dc4-6f74-4022-8004-ede6e54b5706 req-6a5c8a46-100d-453a-ad83-fabb32eda594 service nova] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Received unexpected event network-vif-plugged-fa738fa1-0be4-4506-8e42-73671661dee1 for instance with vm_state building and task_state spawning. [ 657.064364] env[69367]: DEBUG nova.network.neutron [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Successfully updated port: fa738fa1-0be4-4506-8e42-73671661dee1 {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 657.158946] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.473s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 657.159456] env[69367]: DEBUG nova.compute.manager [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Start building networks asynchronously for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 657.162151] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.884s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.163620] env[69367]: INFO nova.compute.claims [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 657.569201] env[69367]: DEBUG oslo_concurrency.lockutils [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquiring lock "refresh_cache-8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.569462] env[69367]: DEBUG oslo_concurrency.lockutils [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquired lock "refresh_cache-8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 657.569652] env[69367]: DEBUG nova.network.neutron [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 657.656855] env[69367]: DEBUG nova.network.neutron [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Successfully created port: 40ba702d-0ae2-48ae-acc0-37f002e4ef6a {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 657.660524] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Acquiring lock "4e346ed1-36e9-421d-975f-e8bb6f05c0a0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.660759] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Lock "4e346ed1-36e9-421d-975f-e8bb6f05c0a0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.660958] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Acquiring lock "4e346ed1-36e9-421d-975f-e8bb6f05c0a0-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.661490] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Lock "4e346ed1-36e9-421d-975f-e8bb6f05c0a0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.661490] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Lock "4e346ed1-36e9-421d-975f-e8bb6f05c0a0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 657.667591] env[69367]: INFO nova.compute.manager [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Terminating instance [ 657.681517] env[69367]: DEBUG nova.compute.utils [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 657.686410] env[69367]: DEBUG nova.compute.manager [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Allocating IP information in the background. 
{{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 657.686410] env[69367]: DEBUG nova.network.neutron [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 657.842141] env[69367]: DEBUG nova.policy [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f009581108542a986ba64a033fa791c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eaf9cd789bbf45df90fb39796f90f041', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 658.190395] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Acquiring lock "refresh_cache-4e346ed1-36e9-421d-975f-e8bb6f05c0a0" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.190395] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Acquired lock "refresh_cache-4e346ed1-36e9-421d-975f-e8bb6f05c0a0" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 658.190572] env[69367]: DEBUG nova.network.neutron [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 658.205504] env[69367]: DEBUG nova.compute.manager [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Start building block device mappings for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 658.245962] env[69367]: DEBUG nova.network.neutron [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 658.653788] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72cf58a0-3e34-407f-bfc0-173dc5481eb1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.663856] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d961c7ae-df42-4272-a10f-54c57b2082fd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.706448] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-663d10b5-08ee-4f21-807e-e74cfccdb7d4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.728295] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c7950d-f187-4c09-ab28-7d242a5ca3ad {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.751199] env[69367]: DEBUG nova.compute.provider_tree [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 658.807885] env[69367]: DEBUG nova.network.neutron [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 658.962473] env[69367]: DEBUG nova.network.neutron [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Updating instance_info_cache with network_info: [{"id": "fa738fa1-0be4-4506-8e42-73671661dee1", "address": "fa:16:3e:c7:7c:7c", "network": {"id": "0b0a82c2-1a70-4174-a75e-5863d3505b2c", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1091682254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26a89ab4163e4b9a801dcbf11c953cf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa738fa1-0b", "ovs_interfaceid": "fa738fa1-0be4-4506-8e42-73671661dee1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.137274] env[69367]: DEBUG nova.network.neutron [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.240732] env[69367]: DEBUG nova.compute.manager [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Start spawning the instance on the hypervisor. 
{{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 659.254639] env[69367]: DEBUG nova.scheduler.client.report [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 659.269878] env[69367]: DEBUG nova.virt.hardware [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 659.270164] env[69367]: DEBUG nova.virt.hardware [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 659.270325] env[69367]: DEBUG nova.virt.hardware [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 659.270508] env[69367]: DEBUG nova.virt.hardware [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 659.270654] env[69367]: DEBUG nova.virt.hardware [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 659.270801] env[69367]: DEBUG nova.virt.hardware [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 659.271095] env[69367]: DEBUG nova.virt.hardware [None req-5942d654-cc5c-4cde-951d-9fa7311a189d 
tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 659.271842] env[69367]: DEBUG nova.virt.hardware [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 659.272131] env[69367]: DEBUG nova.virt.hardware [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 659.272530] env[69367]: DEBUG nova.virt.hardware [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 659.272943] env[69367]: DEBUG nova.virt.hardware [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 659.273896] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2006e553-f677-4a18-98b5-226fcd0ae83b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.285031] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3079b06f-9bc4-44fa-adf6-1b4595d1ab4d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.469934] env[69367]: DEBUG oslo_concurrency.lockutils [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Releasing lock "refresh_cache-8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 659.470222] env[69367]: DEBUG nova.compute.manager [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Instance network_info: |[{"id": "fa738fa1-0be4-4506-8e42-73671661dee1", "address": "fa:16:3e:c7:7c:7c", "network": {"id": "0b0a82c2-1a70-4174-a75e-5863d3505b2c", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1091682254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26a89ab4163e4b9a801dcbf11c953cf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa738fa1-0b", "ovs_interfaceid": "fa738fa1-0be4-4506-8e42-73671661dee1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 659.470699] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:7c:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92fe29b3-0907-453d-aabb-5559c4bd7c0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fa738fa1-0be4-4506-8e42-73671661dee1', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 659.478580] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Creating folder: Project (26a89ab4163e4b9a801dcbf11c953cf3). Parent ref: group-v837645. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 659.478871] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7d97bbd6-e0fb-4924-b89e-c689bfd6042b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.491760] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Created folder: Project (26a89ab4163e4b9a801dcbf11c953cf3) in parent group-v837645. [ 659.491998] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Creating folder: Instances. Parent ref: group-v837677. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 659.492269] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-491430d3-1bfd-49bf-bd00-7303cd0567cd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.509025] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Created folder: Instances in parent group-v837677. [ 659.509482] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 659.510072] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 659.510416] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0342db1a-c638-4d96-abc4-be9f02a7e1f0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.543171] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 659.543171] env[69367]: value = "task-4233794" [ 659.543171] env[69367]: _type = "Task" [ 659.543171] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.553299] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233794, 'name': CreateVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.643391] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Releasing lock "refresh_cache-4e346ed1-36e9-421d-975f-e8bb6f05c0a0" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 659.643954] env[69367]: DEBUG nova.compute.manager [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Start destroying the instance on the hypervisor. {{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 659.644180] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 659.645184] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb6ff02c-cad4-4300-b045-72e674838f63 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.654671] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 659.655614] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-66562093-c325-41ac-a2b4-d8d85ac02be6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.662742] env[69367]: DEBUG oslo_vmware.api [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Waiting for the task: (returnval){ [ 659.662742] env[69367]: value = "task-4233795" [ 659.662742] env[69367]: _type = "Task" [ 659.662742] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.675570] env[69367]: DEBUG oslo_vmware.api [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233795, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.759910] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.598s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 659.762580] env[69367]: DEBUG nova.compute.manager [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Start building networks asynchronously for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 659.765848] env[69367]: DEBUG oslo_concurrency.lockutils [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.922s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 659.766342] env[69367]: DEBUG nova.objects.instance [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Lazy-loading 'resources' on Instance uuid 1302cad6-55b7-4905-92c1-dfdd37042e30 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 660.060248] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233794, 'name': CreateVM_Task, 'duration_secs': 0.393942} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.060248] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 660.060248] env[69367]: DEBUG oslo_concurrency.lockutils [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.060248] env[69367]: DEBUG oslo_concurrency.lockutils [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 660.060248] env[69367]: DEBUG oslo_concurrency.lockutils [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 660.060573] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df3b1f2f-b58b-4bf9-8c99-6d5cf408d3ca {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.064650] env[69367]: DEBUG nova.network.neutron [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Successfully created port: b9b8324a-008d-47c5-a1e4-571d6275a798 {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 660.070055] env[69367]: DEBUG oslo_vmware.api [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 660.070055] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52a8f52e-f538-aedf-491d-553f03f932a5" [ 660.070055] env[69367]: _type = "Task" [ 660.070055] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.083779] env[69367]: DEBUG oslo_vmware.api [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52a8f52e-f538-aedf-491d-553f03f932a5, 'name': SearchDatastore_Task, 'duration_secs': 0.01054} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.084543] env[69367]: DEBUG oslo_concurrency.lockutils [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 660.084992] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 660.085590] env[69367]: DEBUG oslo_concurrency.lockutils [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.085590] env[69367]: DEBUG oslo_concurrency.lockutils [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 660.085590] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 660.086284] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0cad5b4-b2c3-481f-b3de-bced9dae3ee4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.097161] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 660.097407] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 660.098114] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93d5f4da-be15-4fe6-b3c5-7673cf4c773f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.107922] env[69367]: DEBUG oslo_vmware.api [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 660.107922] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52b0ee4f-83e3-2841-384c-67fe78e40642" [ 660.107922] env[69367]: _type = "Task" [ 660.107922] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.122017] env[69367]: DEBUG oslo_vmware.api [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52b0ee4f-83e3-2841-384c-67fe78e40642, 'name': SearchDatastore_Task, 'duration_secs': 0.01058} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.122017] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0735eda5-45c0-4a46-8937-e37d63b750b8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.130470] env[69367]: DEBUG oslo_vmware.api [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 660.130470] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52588a9b-8056-1945-2247-4074186c6949" [ 660.130470] env[69367]: _type = "Task" [ 660.130470] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.148674] env[69367]: DEBUG oslo_vmware.api [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52588a9b-8056-1945-2247-4074186c6949, 'name': SearchDatastore_Task, 'duration_secs': 0.01059} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.148964] env[69367]: DEBUG oslo_concurrency.lockutils [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 660.149238] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa/8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 660.149558] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-62156967-7d0c-4ba1-9d9a-d922f4605cd9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.161110] env[69367]: DEBUG oslo_vmware.api [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 660.161110] env[69367]: value = "task-4233796" [ 660.161110] env[69367]: _type = "Task" [ 660.161110] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.173756] env[69367]: DEBUG oslo_vmware.api [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233796, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.181231] env[69367]: DEBUG oslo_vmware.api [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233795, 'name': PowerOffVM_Task, 'duration_secs': 0.131695} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.181231] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 660.181424] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 660.181760] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f1542c81-5357-4a50-81b9-df4aac6fb0bd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.226424] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 660.226424] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Deleting contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 660.226424] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Deleting the datastore file [datastore1] 4e346ed1-36e9-421d-975f-e8bb6f05c0a0 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 660.226424] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c040783-7860-4d21-9386-c72f66de930f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.238029] env[69367]: DEBUG oslo_vmware.api [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Waiting for the task: (returnval){ [ 660.238029] env[69367]: value = "task-4233798" [ 660.238029] env[69367]: _type = "Task" [ 660.238029] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.257389] env[69367]: DEBUG oslo_vmware.api [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233798, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.269543] env[69367]: DEBUG nova.compute.utils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 660.275476] env[69367]: DEBUG nova.compute.manager [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Allocating IP information in the background. {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 660.275571] env[69367]: DEBUG nova.network.neutron [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 660.675985] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63ad9703-61b9-4f9b-9df9-e68472f58bd1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.681874] env[69367]: DEBUG nova.policy [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3a7bbf03595642c3b42cc5e9f5b79bc0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9c8ac08a704e476fbe794f66f61e27a5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 660.698427] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c9a461-4657-4668-9595-f591741e177a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.703416] env[69367]: DEBUG oslo_vmware.api [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233796, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.737394] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b4146f-537d-4ff9-8be8-cf5e1bab9260 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.752081] env[69367]: DEBUG oslo_vmware.api [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Task: {'id': task-4233798, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107533} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.755199] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 660.755199] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Deleted contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 660.755199] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 660.755199] env[69367]: INFO nova.compute.manager [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Took 1.11 seconds to destroy the instance on the hypervisor. [ 660.755419] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 660.755666] env[69367]: DEBUG nova.compute.manager [-] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 660.755764] env[69367]: DEBUG nova.network.neutron [-] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 660.758827] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aad20d2-b6b5-42d8-93e7-d37882c152ab {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.775448] env[69367]: DEBUG nova.compute.manager [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Start building block device mappings for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 660.778580] env[69367]: DEBUG nova.compute.provider_tree [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 660.821857] env[69367]: DEBUG nova.network.neutron [-] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 660.840392] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Acquiring lock "65fcdf23-421a-45c1-880e-a536ec9fbdfd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 660.840629] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Lock "65fcdf23-421a-45c1-880e-a536ec9fbdfd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.914717] env[69367]: DEBUG nova.compute.manager [req-9cdd4113-1600-4b92-9cd4-e507c3f74be5 req-26a83344-07f3-46fa-b8c3-11093d2db3b1 service nova] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Received event network-changed-fa738fa1-0be4-4506-8e42-73671661dee1 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 660.915062] env[69367]: DEBUG nova.compute.manager [req-9cdd4113-1600-4b92-9cd4-e507c3f74be5 req-26a83344-07f3-46fa-b8c3-11093d2db3b1 service nova] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Refreshing instance network info cache due to event network-changed-fa738fa1-0be4-4506-8e42-73671661dee1. {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 660.915142] env[69367]: DEBUG oslo_concurrency.lockutils [req-9cdd4113-1600-4b92-9cd4-e507c3f74be5 req-26a83344-07f3-46fa-b8c3-11093d2db3b1 service nova] Acquiring lock "refresh_cache-8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.915293] env[69367]: DEBUG oslo_concurrency.lockutils [req-9cdd4113-1600-4b92-9cd4-e507c3f74be5 req-26a83344-07f3-46fa-b8c3-11093d2db3b1 service nova] Acquired lock "refresh_cache-8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 660.915457] env[69367]: DEBUG nova.network.neutron [req-9cdd4113-1600-4b92-9cd4-e507c3f74be5 req-26a83344-07f3-46fa-b8c3-11093d2db3b1 service nova] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Refreshing network info cache for port fa738fa1-0be4-4506-8e42-73671661dee1 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 660.986080] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "171efb4b-7da6-4db3-88db-c36a9d04f872" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 660.986369] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "171efb4b-7da6-4db3-88db-c36a9d04f872" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 661.178659] env[69367]: DEBUG oslo_vmware.api [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233796, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552114} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.179066] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa/8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 661.179326] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 661.179604] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a799c862-ded9-4f75-b768-3e6e48732ea0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.191788] env[69367]: DEBUG oslo_vmware.api [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 661.191788] env[69367]: value = "task-4233799" [ 661.191788] env[69367]: _type = "Task" [ 661.191788] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.208624] env[69367]: DEBUG oslo_vmware.api [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233799, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.288373] env[69367]: DEBUG nova.scheduler.client.report [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 661.325719] env[69367]: DEBUG nova.network.neutron [-] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.706030] env[69367]: DEBUG oslo_vmware.api [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233799, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081034} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.706030] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 661.706633] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79e14ea5-3653-42e1-96ec-6aa025d5dda5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.734735] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Reconfiguring VM instance instance-00000012 to attach disk [datastore2] 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa/8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 661.735223] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a42398a-91a0-4faf-8563-f942fe0d0d42 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.757729] env[69367]: DEBUG oslo_vmware.api [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 661.757729] env[69367]: value = "task-4233800" [ 661.757729] env[69367]: _type = "Task" [ 661.757729] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.767404] env[69367]: DEBUG oslo_vmware.api [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233800, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.800582] env[69367]: DEBUG nova.compute.manager [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Start spawning the instance on the hypervisor. {{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 661.806263] env[69367]: DEBUG oslo_concurrency.lockutils [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.040s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 661.808937] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.197s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 661.810704] env[69367]: INFO nova.compute.claims [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 661.833111] env[69367]: INFO nova.compute.manager [-] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Took 1.08 seconds to deallocate network for instance. 
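Aside: the "Inventory has not changed" records above keep repeating the same payload for provider 19ddf8be-7305-4f70-8366-52a9957232e6. A minimal sketch in plain Python (not the placement code itself) of how the schedulable capacity follows from that payload, using the exact numbers logged:

# Illustrative only: the effective capacity per resource class is
# (total - reserved) * allocation_ratio, applied to the logged inventory.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def effective_capacity(inv):
    """Return the schedulable amount per resource class."""
    return {
        rc: (data['total'] - data['reserved']) * data['allocation_ratio']
        for rc, data in inv.items()
    }

print(effective_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

So the m1.nano claims (1 vCPU, 192 MB, 1 GB root disk) seen in this run fit comfortably inside that inventory, which is why the claims succeed without the inventory changing.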
[ 661.851775] env[69367]: DEBUG nova.virt.hardware [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 661.855203] env[69367]: DEBUG nova.virt.hardware [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 661.855393] env[69367]: DEBUG nova.virt.hardware [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 661.855616] env[69367]: DEBUG nova.virt.hardware [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 661.855690] env[69367]: DEBUG nova.virt.hardware [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 661.855834] env[69367]: DEBUG nova.virt.hardware [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 661.856555] env[69367]: DEBUG nova.virt.hardware [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 661.856555] env[69367]: DEBUG nova.virt.hardware [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 661.856555] env[69367]: DEBUG nova.virt.hardware [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 
tempest-MultipleCreateTestJSON-1024910139-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 661.856555] env[69367]: DEBUG nova.virt.hardware [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 661.856814] env[69367]: DEBUG nova.virt.hardware [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 661.857647] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b32df14-1f51-4329-9451-9f03bad0642f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.865087] env[69367]: INFO nova.scheduler.client.report [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Deleted allocations for instance 1302cad6-55b7-4905-92c1-dfdd37042e30 [ 661.873913] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f816a816-12b9-449e-b9bb-75a32b0ce6b5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.987907] env[69367]: DEBUG nova.network.neutron [req-9cdd4113-1600-4b92-9cd4-e507c3f74be5 req-26a83344-07f3-46fa-b8c3-11093d2db3b1 service nova] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Updated VIF entry in instance network info cache for port fa738fa1-0be4-4506-8e42-73671661dee1. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 661.988309] env[69367]: DEBUG nova.network.neutron [req-9cdd4113-1600-4b92-9cd4-e507c3f74be5 req-26a83344-07f3-46fa-b8c3-11093d2db3b1 service nova] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Updating instance_info_cache with network_info: [{"id": "fa738fa1-0be4-4506-8e42-73671661dee1", "address": "fa:16:3e:c7:7c:7c", "network": {"id": "0b0a82c2-1a70-4174-a75e-5863d3505b2c", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1091682254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26a89ab4163e4b9a801dcbf11c953cf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa738fa1-0b", "ovs_interfaceid": "fa738fa1-0be4-4506-8e42-73671661dee1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.272147] env[69367]: DEBUG oslo_vmware.api [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233800, 'name': ReconfigVM_Task, 'duration_secs': 0.307765} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.272147] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Reconfigured VM instance instance-00000012 to attach disk [datastore2] 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa/8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 662.272652] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c813eebd-914f-4fdb-b879-ae6915e0f6f1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.282219] env[69367]: DEBUG oslo_vmware.api [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 662.282219] env[69367]: value = "task-4233801" [ 662.282219] env[69367]: _type = "Task" [ 662.282219] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.293037] env[69367]: DEBUG oslo_vmware.api [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233801, 'name': Rename_Task} progress is 5%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.341553] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 662.376632] env[69367]: DEBUG oslo_concurrency.lockutils [None req-60cb02e4-845c-48dc-b93d-4834b21efeca tempest-ServerDiagnosticsTest-302831517 tempest-ServerDiagnosticsTest-302831517-project-member] Lock "1302cad6-55b7-4905-92c1-dfdd37042e30" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.229s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 662.491000] env[69367]: DEBUG oslo_concurrency.lockutils [req-9cdd4113-1600-4b92-9cd4-e507c3f74be5 req-26a83344-07f3-46fa-b8c3-11093d2db3b1 service nova] Releasing lock "refresh_cache-8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 662.794203] env[69367]: DEBUG oslo_vmware.api [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233801, 'name': Rename_Task, 'duration_secs': 0.16055} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.794554] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 662.795560] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-95785018-9b95-440b-b0ed-67c9483f4f06 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.806038] env[69367]: DEBUG oslo_vmware.api [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 662.806038] env[69367]: value = "task-4233802" [ 662.806038] env[69367]: _type = "Task" [ 662.806038] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.820224] env[69367]: DEBUG oslo_vmware.api [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233802, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.125043] env[69367]: DEBUG nova.network.neutron [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Successfully updated port: 40ba702d-0ae2-48ae-acc0-37f002e4ef6a {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 663.165028] env[69367]: DEBUG nova.network.neutron [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Successfully created port: 6de8ad4d-1ee6-4190-bcaa-941184f740e1 {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 663.259590] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-739240b2-a4fe-464d-8fff-9065ba886bf0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.283671] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ada8aae-55a9-4a8c-b213-21f23a4366b6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.346311] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23019a44-046f-47d4-91b4-fb2ffb4b78e0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.366245] env[69367]: DEBUG oslo_vmware.api [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233802, 'name': PowerOnVM_Task, 'duration_secs': 0.481412} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.366992] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 663.366992] env[69367]: INFO nova.compute.manager [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Took 9.16 seconds to spawn the instance on the hypervisor. 
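The PowerOnVM_Task records above follow the same pattern as the earlier CopyVirtualDisk_Task and ReconfigVM_Task ones: a task moniker ("task-4233802") is returned, progress is polled at DEBUG level, and the completion record carries a duration_secs. A rough sketch of that polling loop in plain Python; get_task_info and poll_interval are illustrative stand-ins, not the oslo.vmware API, which the driver actually delegates to:

import time

def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a vCenter task until it leaves the running states.

    get_task_info is a stand-in callable returning an object with
    .state ('queued', 'running', 'success', 'error'), .progress and .error.
    """
    start = time.monotonic()
    while True:
        info = get_task_info()
        if info.state == 'success':
            # Mirrors the 'duration_secs' value in the completion record.
            return time.monotonic() - start
        if info.state == 'error':
            raise RuntimeError(f"task failed: {info.error}")
        # Equivalent of the "progress is N%" DEBUG lines above.
        print(f"progress is {info.progress or 0}%")
        time.sleep(poll_interval)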
[ 663.366992] env[69367]: DEBUG nova.compute.manager [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 663.368966] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb2a1f0-acd3-4062-94b3-e10b0536512e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.375933] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20fa1cc2-3dc2-48f7-8e9b-9b7acf39cb35 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.396993] env[69367]: DEBUG nova.compute.provider_tree [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 663.635063] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Acquiring lock "refresh_cache-ba4d981a-19f7-41ef-b7d1-a3f3830fe725" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.635063] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Acquired lock "refresh_cache-ba4d981a-19f7-41ef-b7d1-a3f3830fe725" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 663.635063] env[69367]: DEBUG nova.network.neutron [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 663.821281] env[69367]: DEBUG oslo_concurrency.lockutils [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Acquiring lock "10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 663.821642] env[69367]: DEBUG oslo_concurrency.lockutils [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Lock "10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.906465] env[69367]: DEBUG nova.scheduler.client.report [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Inventory has not changed for 
provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 663.913428] env[69367]: INFO nova.compute.manager [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Took 27.69 seconds to build instance. [ 664.294590] env[69367]: DEBUG nova.network.neutron [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 664.413467] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.603s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 664.413794] env[69367]: DEBUG nova.compute.manager [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Start building networks asynchronously for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 664.418295] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.344s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 664.418295] env[69367]: DEBUG nova.objects.instance [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Lazy-loading 'resources' on Instance uuid 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 664.419869] env[69367]: DEBUG oslo_concurrency.lockutils [None req-deeafe0b-7731-4feb-8d59-141ef277c88d tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.590s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 664.421682] env[69367]: DEBUG nova.compute.manager [req-ebeaced4-4089-44f7-9c78-f2731ab48f3e req-7e2515dd-0269-4478-a467-da01839136c4 service nova] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Received event network-vif-plugged-40ba702d-0ae2-48ae-acc0-37f002e4ef6a {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 664.421943] env[69367]: DEBUG oslo_concurrency.lockutils [req-ebeaced4-4089-44f7-9c78-f2731ab48f3e req-7e2515dd-0269-4478-a467-da01839136c4 service nova] Acquiring lock "ba4d981a-19f7-41ef-b7d1-a3f3830fe725-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 664.422815] env[69367]: DEBUG oslo_concurrency.lockutils [req-ebeaced4-4089-44f7-9c78-f2731ab48f3e req-7e2515dd-0269-4478-a467-da01839136c4 service nova] Lock "ba4d981a-19f7-41ef-b7d1-a3f3830fe725-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 664.422815] env[69367]: DEBUG oslo_concurrency.lockutils [req-ebeaced4-4089-44f7-9c78-f2731ab48f3e req-7e2515dd-0269-4478-a467-da01839136c4 service nova] Lock "ba4d981a-19f7-41ef-b7d1-a3f3830fe725-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 664.422815] env[69367]: DEBUG nova.compute.manager [req-ebeaced4-4089-44f7-9c78-f2731ab48f3e req-7e2515dd-0269-4478-a467-da01839136c4 service nova] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] No waiting events found dispatching network-vif-plugged-40ba702d-0ae2-48ae-acc0-37f002e4ef6a {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 664.422815] env[69367]: WARNING nova.compute.manager [req-ebeaced4-4089-44f7-9c78-f2731ab48f3e req-7e2515dd-0269-4478-a467-da01839136c4 service nova] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Received unexpected event 
network-vif-plugged-40ba702d-0ae2-48ae-acc0-37f002e4ef6a for instance with vm_state building and task_state spawning. [ 664.925390] env[69367]: DEBUG nova.compute.utils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 664.930572] env[69367]: DEBUG nova.compute.manager [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Allocating IP information in the background. {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 664.930928] env[69367]: DEBUG nova.network.neutron [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 664.933904] env[69367]: DEBUG nova.compute.manager [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 665.082244] env[69367]: DEBUG nova.network.neutron [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Successfully updated port: b9b8324a-008d-47c5-a1e4-571d6275a798 {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 665.115283] env[69367]: DEBUG nova.network.neutron [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Updating instance_info_cache with network_info: [{"id": "40ba702d-0ae2-48ae-acc0-37f002e4ef6a", "address": "fa:16:3e:ce:88:f8", "network": {"id": "62ab7c93-8b0d-49ed-aa82-3bc315e190df", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.135", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cd7c200d5cd6461fb951580f8c764c42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40ba702d-0a", "ovs_interfaceid": "40ba702d-0ae2-48ae-acc0-37f002e4ef6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.199788] env[69367]: DEBUG nova.policy [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 
tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3a7bbf03595642c3b42cc5e9f5b79bc0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9c8ac08a704e476fbe794f66f61e27a5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 665.350125] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b80df2e8-b7ab-4a4b-b30c-360bdcf16c35 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.363952] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d90d5f-c921-426b-a876-401cf19a8600 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.402635] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1481e37e-7176-40cc-932c-1067da049c06 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.411353] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b638bbb8-4041-418b-a7bb-53f653413130 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.427222] env[69367]: DEBUG nova.compute.provider_tree [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 665.433796] env[69367]: DEBUG nova.compute.manager [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Start building block device mappings for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 665.458991] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.589047] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Acquiring lock "refresh_cache-fa4a5dbc-b885-4439-8520-0bfff38438b3" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.589047] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Acquired lock "refresh_cache-fa4a5dbc-b885-4439-8520-0bfff38438b3" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 665.589047] env[69367]: DEBUG nova.network.neutron [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 665.617941] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Releasing lock "refresh_cache-ba4d981a-19f7-41ef-b7d1-a3f3830fe725" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 665.618405] env[69367]: DEBUG nova.compute.manager [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Instance network_info: |[{"id": "40ba702d-0ae2-48ae-acc0-37f002e4ef6a", "address": "fa:16:3e:ce:88:f8", "network": {"id": "62ab7c93-8b0d-49ed-aa82-3bc315e190df", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.135", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cd7c200d5cd6461fb951580f8c764c42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40ba702d-0a", "ovs_interfaceid": "40ba702d-0ae2-48ae-acc0-37f002e4ef6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 665.619388] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None 
req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ce:88:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb2eaecd-9701-4504-9fcb-fb1a420ead72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '40ba702d-0ae2-48ae-acc0-37f002e4ef6a', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 665.629452] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Creating folder: Project (c210e789b141449ba9a29ab8bbc39746). Parent ref: group-v837645. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 665.629813] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a8672db2-a95a-4e91-a0a2-cf5fbed8d728 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.645130] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Created folder: Project (c210e789b141449ba9a29ab8bbc39746) in parent group-v837645. [ 665.645407] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Creating folder: Instances. Parent ref: group-v837680. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 665.645919] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f24ca66b-1011-4c8d-8b0a-7f3fa4c4b022 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.658424] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Created folder: Instances in parent group-v837680. [ 665.659243] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 665.659243] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 665.659243] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a478e89-ae1f-4660-be12-cf3f9d5dc7f9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.683095] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 665.683095] env[69367]: value = "task-4233805" [ 665.683095] env[69367]: _type = "Task" [ 665.683095] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.692993] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233805, 'name': CreateVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.930635] env[69367]: DEBUG nova.scheduler.client.report [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 666.199619] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233805, 'name': CreateVM_Task} progress is 99%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.235144] env[69367]: DEBUG nova.network.neutron [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 666.438280] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.020s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 666.441792] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.598s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 666.443686] env[69367]: INFO nova.compute.claims [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 666.449544] env[69367]: DEBUG nova.compute.manager [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Start spawning the instance on the hypervisor. 
{{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 666.488102] env[69367]: DEBUG nova.virt.hardware [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 666.488102] env[69367]: DEBUG nova.virt.hardware [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 666.488102] env[69367]: DEBUG nova.virt.hardware [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 666.488258] env[69367]: DEBUG nova.virt.hardware [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 666.488258] env[69367]: DEBUG nova.virt.hardware [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 666.488701] env[69367]: DEBUG nova.virt.hardware [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 666.489049] env[69367]: DEBUG nova.virt.hardware [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 666.489385] env[69367]: DEBUG nova.virt.hardware [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 666.489954] env[69367]: DEBUG nova.virt.hardware [None 
req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 666.489954] env[69367]: DEBUG nova.virt.hardware [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 666.494116] env[69367]: DEBUG nova.virt.hardware [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 666.494116] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b698c83-6de6-40eb-ab89-3064d86f3619 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.496821] env[69367]: INFO nova.scheduler.client.report [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Deleted allocations for instance 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e [ 666.517463] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c676556-71bb-44a7-a92d-a1de2efb849e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.694304] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233805, 'name': CreateVM_Task, 'duration_secs': 0.513813} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.694502] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 666.695343] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.695510] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 666.695842] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 666.696152] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09ebfdf2-86fb-492d-b413-784190fd51dc {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.701997] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Waiting for the task: (returnval){ [ 666.701997] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52b72279-2d38-7ad8-77d7-71b603dae9cd" [ 666.701997] env[69367]: _type = "Task" [ 666.701997] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.717674] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52b72279-2d38-7ad8-77d7-71b603dae9cd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.016019] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2d781f53-8f2b-470c-a4e4-419a37b9e8c6 tempest-ServerDiagnosticsV248Test-1636451139 tempest-ServerDiagnosticsV248Test-1636451139-project-member] Lock "937c05e9-06f1-4a5f-9f8c-ac40c262ce4e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.246s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.221534] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52b72279-2d38-7ad8-77d7-71b603dae9cd, 'name': SearchDatastore_Task, 'duration_secs': 0.037797} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.221873] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 667.222178] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 667.222518] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.222612] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 667.222747] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 667.223113] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f7ee7a73-2d5e-4429-a0b1-e1a3e50604ca {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.239342] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Created directory with path [datastore2] 
devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 667.239530] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 667.240557] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b294fddd-a460-42b7-a816-d57181533544 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.251094] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Waiting for the task: (returnval){ [ 667.251094] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]521b425d-cc47-bdd5-ba70-67f452b86ab9" [ 667.251094] env[69367]: _type = "Task" [ 667.251094] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.264423] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]521b425d-cc47-bdd5-ba70-67f452b86ab9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.556238] env[69367]: DEBUG nova.network.neutron [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Updating instance_info_cache with network_info: [{"id": "b9b8324a-008d-47c5-a1e4-571d6275a798", "address": "fa:16:3e:57:23:3b", "network": {"id": "e960e682-22ec-43d9-b511-c1b4f0dd64a4", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1877720464-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eaf9cd789bbf45df90fb39796f90f041", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2bf99f85-3a5c-47c6-a603-e215be6ab0bd", "external-id": "nsx-vlan-transportzone-855", "segmentation_id": 855, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9b8324a-00", "ovs_interfaceid": "b9b8324a-008d-47c5-a1e4-571d6275a798", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.614454] env[69367]: DEBUG nova.network.neutron [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Successfully created port: a1308824-de04-4736-b17a-57bf0eae53ff 
{{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 667.773386] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]521b425d-cc47-bdd5-ba70-67f452b86ab9, 'name': SearchDatastore_Task, 'duration_secs': 0.014982} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.778762] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40b70cd3-da7d-4acf-82f1-9ca0928e99da {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.789673] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Waiting for the task: (returnval){ [ 667.789673] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52968e4b-7a49-c079-6365-6d7219934cd1" [ 667.789673] env[69367]: _type = "Task" [ 667.789673] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.798325] env[69367]: DEBUG nova.compute.manager [req-b328ad72-2082-4bbb-8171-d6aff0f51a7d req-dd79e77a-0bac-41cf-9b83-1a5b416e07c7 service nova] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Received event network-changed-40ba702d-0ae2-48ae-acc0-37f002e4ef6a {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 667.798676] env[69367]: DEBUG nova.compute.manager [req-b328ad72-2082-4bbb-8171-d6aff0f51a7d req-dd79e77a-0bac-41cf-9b83-1a5b416e07c7 service nova] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Refreshing instance network info cache due to event network-changed-40ba702d-0ae2-48ae-acc0-37f002e4ef6a. {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 667.798742] env[69367]: DEBUG oslo_concurrency.lockutils [req-b328ad72-2082-4bbb-8171-d6aff0f51a7d req-dd79e77a-0bac-41cf-9b83-1a5b416e07c7 service nova] Acquiring lock "refresh_cache-ba4d981a-19f7-41ef-b7d1-a3f3830fe725" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.798868] env[69367]: DEBUG oslo_concurrency.lockutils [req-b328ad72-2082-4bbb-8171-d6aff0f51a7d req-dd79e77a-0bac-41cf-9b83-1a5b416e07c7 service nova] Acquired lock "refresh_cache-ba4d981a-19f7-41ef-b7d1-a3f3830fe725" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 667.799092] env[69367]: DEBUG nova.network.neutron [req-b328ad72-2082-4bbb-8171-d6aff0f51a7d req-dd79e77a-0bac-41cf-9b83-1a5b416e07c7 service nova] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Refreshing network info cache for port 40ba702d-0ae2-48ae-acc0-37f002e4ef6a {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 667.806894] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52968e4b-7a49-c079-6365-6d7219934cd1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.999483] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad8dd2ae-b04d-4dde-a8bf-8c4d367ccd4e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.010584] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ac8a59-1441-4c20-8f91-72e4b7278422 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.047293] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9e2378e-231f-4c79-aa86-8d1ed790f4e6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.058337] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e387b6e-ceac-4bbb-92a7-7e13e10ce220 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.063337] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Releasing lock "refresh_cache-fa4a5dbc-b885-4439-8520-0bfff38438b3" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 668.063963] env[69367]: DEBUG nova.compute.manager [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Instance network_info: |[{"id": "b9b8324a-008d-47c5-a1e4-571d6275a798", "address": "fa:16:3e:57:23:3b", "network": {"id": "e960e682-22ec-43d9-b511-c1b4f0dd64a4", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1877720464-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eaf9cd789bbf45df90fb39796f90f041", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2bf99f85-3a5c-47c6-a603-e215be6ab0bd", "external-id": "nsx-vlan-transportzone-855", "segmentation_id": 855, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9b8324a-00", "ovs_interfaceid": "b9b8324a-008d-47c5-a1e4-571d6275a798", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 668.064630] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:23:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2bf99f85-3a5c-47c6-a603-e215be6ab0bd', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': 'b9b8324a-008d-47c5-a1e4-571d6275a798', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 668.072627] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Creating folder: Project (eaf9cd789bbf45df90fb39796f90f041). Parent ref: group-v837645. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 668.073696] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0f703feb-e9d1-4ede-b7aa-9530978a4f77 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.085842] env[69367]: DEBUG nova.compute.provider_tree [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 668.101106] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Created folder: Project (eaf9cd789bbf45df90fb39796f90f041) in parent group-v837645. [ 668.101106] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Creating folder: Instances. Parent ref: group-v837683. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 668.101106] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b4a24d52-349d-4cbe-81a6-09e49ba58ba2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.115704] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Created folder: Instances in parent group-v837683. [ 668.115972] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 668.116204] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 668.116443] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3541bda0-e299-408e-b40b-66073c7b262c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.138597] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 668.138597] env[69367]: value = "task-4233808" [ 668.138597] env[69367]: _type = "Task" [ 668.138597] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.152700] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233808, 'name': CreateVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.304569] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52968e4b-7a49-c079-6365-6d7219934cd1, 'name': SearchDatastore_Task, 'duration_secs': 0.036649} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.304569] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 668.304569] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] ba4d981a-19f7-41ef-b7d1-a3f3830fe725/ba4d981a-19f7-41ef-b7d1-a3f3830fe725.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 668.304569] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-99a3b5f2-a78b-43b6-99f5-f6397e1cc1be {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.316135] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Waiting for the task: (returnval){ [ 668.316135] env[69367]: value = "task-4233809" [ 668.316135] env[69367]: _type = "Task" [ 668.316135] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.326825] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Task: {'id': task-4233809, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.590591] env[69367]: DEBUG nova.scheduler.client.report [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 668.659631] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233808, 'name': CreateVM_Task} progress is 25%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.827999] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Task: {'id': task-4233809, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.093053] env[69367]: DEBUG nova.network.neutron [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Successfully updated port: 6de8ad4d-1ee6-4190-bcaa-941184f740e1 {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 669.101286] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.659s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 669.102514] env[69367]: DEBUG nova.compute.manager [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Start building networks asynchronously for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 669.108513] env[69367]: DEBUG oslo_concurrency.lockutils [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.309s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 669.108513] env[69367]: DEBUG nova.objects.instance [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Lazy-loading 'resources' on Instance uuid 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 669.156805] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233808, 'name': CreateVM_Task, 'duration_secs': 0.675566} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.157067] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 669.159229] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.159429] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.159877] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 669.160860] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5af532a-d4b0-459d-a545-aa2cc3ca2da9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.168120] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Waiting for the task: (returnval){ [ 669.168120] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52e6b1b6-af3d-906a-c766-7328064600b3" [ 669.168120] env[69367]: _type = "Task" [ 669.168120] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.178715] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52e6b1b6-af3d-906a-c766-7328064600b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.339384] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Task: {'id': task-4233809, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.881693} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.341518] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] ba4d981a-19f7-41ef-b7d1-a3f3830fe725/ba4d981a-19f7-41ef-b7d1-a3f3830fe725.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 669.341518] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 669.341518] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-27ce78ae-28ab-4116-b7dc-6b55bdf9fafa {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.353794] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Waiting for the task: (returnval){ [ 669.353794] env[69367]: value = "task-4233810" [ 669.353794] env[69367]: _type = "Task" [ 669.353794] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.367956] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Task: {'id': task-4233810, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.600457] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "refresh_cache-ab365570-ac29-4094-be4c-d49563a465c8" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.600457] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquired lock "refresh_cache-ab365570-ac29-4094-be4c-d49563a465c8" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.600457] env[69367]: DEBUG nova.network.neutron [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 669.609154] env[69367]: DEBUG nova.compute.utils [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 669.610705] env[69367]: DEBUG nova.compute.manager [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Allocating IP information in the background. {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 669.610788] env[69367]: DEBUG nova.network.neutron [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 669.683544] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52e6b1b6-af3d-906a-c766-7328064600b3, 'name': SearchDatastore_Task, 'duration_secs': 0.012693} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.691019] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 669.691019] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 669.691019] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.691019] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.691516] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 669.691516] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2df6117c-834b-4148-8e21-7ebc63ea4ecf {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.704860] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 669.705080] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 669.705904] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1821a611-12f5-4df9-8122-8b69f4a26539 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.712910] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Waiting for the task: (returnval){ [ 669.712910] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52cc101b-fa62-901b-5d75-f2c6a4597855" [ 669.712910] env[69367]: _type = "Task" [ 669.712910] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.725214] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52cc101b-fa62-901b-5d75-f2c6a4597855, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.728570] env[69367]: DEBUG nova.network.neutron [req-b328ad72-2082-4bbb-8171-d6aff0f51a7d req-dd79e77a-0bac-41cf-9b83-1a5b416e07c7 service nova] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Updated VIF entry in instance network info cache for port 40ba702d-0ae2-48ae-acc0-37f002e4ef6a. {{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 669.728901] env[69367]: DEBUG nova.network.neutron [req-b328ad72-2082-4bbb-8171-d6aff0f51a7d req-dd79e77a-0bac-41cf-9b83-1a5b416e07c7 service nova] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Updating instance_info_cache with network_info: [{"id": "40ba702d-0ae2-48ae-acc0-37f002e4ef6a", "address": "fa:16:3e:ce:88:f8", "network": {"id": "62ab7c93-8b0d-49ed-aa82-3bc315e190df", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.135", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cd7c200d5cd6461fb951580f8c764c42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40ba702d-0a", "ovs_interfaceid": "40ba702d-0ae2-48ae-acc0-37f002e4ef6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.869261] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Task: {'id': task-4233810, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082231} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.872731] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 669.874888] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b38bc7-4145-4781-b0f8-5f53f4ffbee2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.903311] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Reconfiguring VM instance instance-00000013 to attach disk [datastore2] ba4d981a-19f7-41ef-b7d1-a3f3830fe725/ba4d981a-19f7-41ef-b7d1-a3f3830fe725.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 669.904902] env[69367]: DEBUG nova.policy [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b7fd8384e0da4db3a6a57a34e0d73574', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e332ecec6c1c43c18345d8a2761d98be', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 669.909349] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c65ff7c-0649-4e02-9607-d1ebd15b4466 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.935866] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Waiting for the task: (returnval){ [ 669.935866] env[69367]: value = "task-4233811" [ 669.935866] env[69367]: _type = "Task" [ 669.935866] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.947594] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Task: {'id': task-4233811, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.021441] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Acquiring lock "bdc0938b-60ef-463a-b3fd-1754f38a3b79" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.021707] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Lock "bdc0938b-60ef-463a-b3fd-1754f38a3b79" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.117565] env[69367]: DEBUG nova.compute.manager [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Start building block device mappings for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 670.130291] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19dd7a71-2a66-4e0d-8281-be3c48bfe7f6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.141910] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df1a1d37-3d18-416f-bfaa-4ee0f9bbbe81 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.184196] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-079b9011-4bc5-42cc-ab17-8e99fe8d6f6b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.194562] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02eebcaa-6f27-4392-86fd-d1c86dd9a2da {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.216760] env[69367]: DEBUG nova.compute.provider_tree [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 670.228017] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52cc101b-fa62-901b-5d75-f2c6a4597855, 'name': SearchDatastore_Task, 'duration_secs': 0.041328} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.229245] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f876542-a27e-4b9e-83d0-132d32eb1d3c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.232985] env[69367]: DEBUG oslo_concurrency.lockutils [req-b328ad72-2082-4bbb-8171-d6aff0f51a7d req-dd79e77a-0bac-41cf-9b83-1a5b416e07c7 service nova] Releasing lock "refresh_cache-ba4d981a-19f7-41ef-b7d1-a3f3830fe725" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 670.233332] env[69367]: DEBUG nova.compute.manager [req-b328ad72-2082-4bbb-8171-d6aff0f51a7d req-dd79e77a-0bac-41cf-9b83-1a5b416e07c7 service nova] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Received event network-vif-plugged-b9b8324a-008d-47c5-a1e4-571d6275a798 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 670.233615] env[69367]: DEBUG oslo_concurrency.lockutils [req-b328ad72-2082-4bbb-8171-d6aff0f51a7d req-dd79e77a-0bac-41cf-9b83-1a5b416e07c7 service nova] Acquiring lock "fa4a5dbc-b885-4439-8520-0bfff38438b3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.234039] env[69367]: DEBUG oslo_concurrency.lockutils [req-b328ad72-2082-4bbb-8171-d6aff0f51a7d req-dd79e77a-0bac-41cf-9b83-1a5b416e07c7 service nova] Lock "fa4a5dbc-b885-4439-8520-0bfff38438b3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.234784] env[69367]: DEBUG oslo_concurrency.lockutils [req-b328ad72-2082-4bbb-8171-d6aff0f51a7d req-dd79e77a-0bac-41cf-9b83-1a5b416e07c7 service nova] Lock "fa4a5dbc-b885-4439-8520-0bfff38438b3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.235108] env[69367]: DEBUG nova.compute.manager [req-b328ad72-2082-4bbb-8171-d6aff0f51a7d req-dd79e77a-0bac-41cf-9b83-1a5b416e07c7 service nova] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] No waiting events found dispatching network-vif-plugged-b9b8324a-008d-47c5-a1e4-571d6275a798 {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 670.235411] env[69367]: WARNING nova.compute.manager [req-b328ad72-2082-4bbb-8171-d6aff0f51a7d req-dd79e77a-0bac-41cf-9b83-1a5b416e07c7 service nova] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Received unexpected event network-vif-plugged-b9b8324a-008d-47c5-a1e4-571d6275a798 for instance with vm_state building and task_state spawning. 
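The Acquiring/Acquired/Releasing and acquired-by/released-by lock messages that run through the records above come from oslo.concurrency's lockutils helpers (the lockutils.py:313/316/334 and :405/410/424 call sites named in each entry). A minimal sketch of the two usage patterns that produce them; the lock names and guarded bodies below are placeholders, not the actual Nova code:

    # Sketch only: lock names and the work done under them are illustrative.
    from oslo_concurrency import lockutils

    # Context-manager form -- emits the "Acquiring lock" / "Acquired lock" /
    # "Releasing lock" DEBUG lines (lockutils.py:313/316/334 in the log above).
    def refresh_network_cache(instance_uuid):
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            pass  # refresh the instance network info cache under the lock

    # Decorator form -- emits the '... acquired by ... :: waited Ns' and
    # '... "released" by ... :: held Ns' lines (lockutils.py:405/410/424 above).
    @lockutils.synchronized("compute_resources")
    def update_resource_usage():
        pass  # update the resource tracker while holding the shared lock

The waited/held durations in those records are measured around exactly this acquire/release pair, which is why a long critical section (for example the 30.246s terminate_instance lock hold earlier in this section) shows up directly in the released-by line.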
[ 670.235625] env[69367]: DEBUG nova.compute.manager [req-b328ad72-2082-4bbb-8171-d6aff0f51a7d req-dd79e77a-0bac-41cf-9b83-1a5b416e07c7 service nova] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Received event network-changed-b9b8324a-008d-47c5-a1e4-571d6275a798 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 670.236170] env[69367]: DEBUG nova.compute.manager [req-b328ad72-2082-4bbb-8171-d6aff0f51a7d req-dd79e77a-0bac-41cf-9b83-1a5b416e07c7 service nova] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Refreshing instance network info cache due to event network-changed-b9b8324a-008d-47c5-a1e4-571d6275a798. {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 670.236170] env[69367]: DEBUG oslo_concurrency.lockutils [req-b328ad72-2082-4bbb-8171-d6aff0f51a7d req-dd79e77a-0bac-41cf-9b83-1a5b416e07c7 service nova] Acquiring lock "refresh_cache-fa4a5dbc-b885-4439-8520-0bfff38438b3" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.236367] env[69367]: DEBUG oslo_concurrency.lockutils [req-b328ad72-2082-4bbb-8171-d6aff0f51a7d req-dd79e77a-0bac-41cf-9b83-1a5b416e07c7 service nova] Acquired lock "refresh_cache-fa4a5dbc-b885-4439-8520-0bfff38438b3" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.236822] env[69367]: DEBUG nova.network.neutron [req-b328ad72-2082-4bbb-8171-d6aff0f51a7d req-dd79e77a-0bac-41cf-9b83-1a5b416e07c7 service nova] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Refreshing network info cache for port b9b8324a-008d-47c5-a1e4-571d6275a798 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 670.245492] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Waiting for the task: (returnval){ [ 670.245492] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]527fddc0-8aa3-5a71-0505-b26bc8f27b6b" [ 670.245492] env[69367]: _type = "Task" [ 670.245492] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.253870] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]527fddc0-8aa3-5a71-0505-b26bc8f27b6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.411478] env[69367]: DEBUG nova.network.neutron [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 670.447252] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Task: {'id': task-4233811, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.479583] env[69367]: DEBUG nova.compute.manager [req-1dced25c-9266-4f6b-be4b-85fab9b490af req-43e0e6a8-10d0-4bdd-b1aa-490b5f864d33 service nova] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Received event network-changed-fa738fa1-0be4-4506-8e42-73671661dee1 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 670.479779] env[69367]: DEBUG nova.compute.manager [req-1dced25c-9266-4f6b-be4b-85fab9b490af req-43e0e6a8-10d0-4bdd-b1aa-490b5f864d33 service nova] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Refreshing instance network info cache due to event network-changed-fa738fa1-0be4-4506-8e42-73671661dee1. {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 670.480048] env[69367]: DEBUG oslo_concurrency.lockutils [req-1dced25c-9266-4f6b-be4b-85fab9b490af req-43e0e6a8-10d0-4bdd-b1aa-490b5f864d33 service nova] Acquiring lock "refresh_cache-8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.480198] env[69367]: DEBUG oslo_concurrency.lockutils [req-1dced25c-9266-4f6b-be4b-85fab9b490af req-43e0e6a8-10d0-4bdd-b1aa-490b5f864d33 service nova] Acquired lock "refresh_cache-8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.480444] env[69367]: DEBUG nova.network.neutron [req-1dced25c-9266-4f6b-be4b-85fab9b490af req-43e0e6a8-10d0-4bdd-b1aa-490b5f864d33 service nova] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Refreshing network info cache for port fa738fa1-0be4-4506-8e42-73671661dee1 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 670.722542] env[69367]: DEBUG nova.scheduler.client.report [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 670.756562] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]527fddc0-8aa3-5a71-0505-b26bc8f27b6b, 'name': SearchDatastore_Task, 'duration_secs': 0.033832} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.757307] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 670.757307] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] fa4a5dbc-b885-4439-8520-0bfff38438b3/fa4a5dbc-b885-4439-8520-0bfff38438b3.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 670.757798] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-31a38f7f-b91f-4889-9edf-82d65dbaf63a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.768352] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Waiting for the task: (returnval){ [ 670.768352] env[69367]: value = "task-4233812" [ 670.768352] env[69367]: _type = "Task" [ 670.768352] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.780259] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Task: {'id': task-4233812, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.798446] env[69367]: DEBUG nova.compute.manager [req-5e2a679d-ecd3-49cd-98e0-dcebbf9ef5ab req-b03fd931-253a-44c3-b8dd-7937166375f0 service nova] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Received event network-vif-plugged-6de8ad4d-1ee6-4190-bcaa-941184f740e1 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 670.799185] env[69367]: DEBUG oslo_concurrency.lockutils [req-5e2a679d-ecd3-49cd-98e0-dcebbf9ef5ab req-b03fd931-253a-44c3-b8dd-7937166375f0 service nova] Acquiring lock "ab365570-ac29-4094-be4c-d49563a465c8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.799185] env[69367]: DEBUG oslo_concurrency.lockutils [req-5e2a679d-ecd3-49cd-98e0-dcebbf9ef5ab req-b03fd931-253a-44c3-b8dd-7937166375f0 service nova] Lock "ab365570-ac29-4094-be4c-d49563a465c8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.799609] env[69367]: DEBUG oslo_concurrency.lockutils [req-5e2a679d-ecd3-49cd-98e0-dcebbf9ef5ab req-b03fd931-253a-44c3-b8dd-7937166375f0 service nova] Lock "ab365570-ac29-4094-be4c-d49563a465c8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.799741] env[69367]: DEBUG nova.compute.manager [req-5e2a679d-ecd3-49cd-98e0-dcebbf9ef5ab req-b03fd931-253a-44c3-b8dd-7937166375f0 service nova] [instance: ab365570-ac29-4094-be4c-d49563a465c8] No waiting events found dispatching network-vif-plugged-6de8ad4d-1ee6-4190-bcaa-941184f740e1 {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 670.799986] env[69367]: WARNING nova.compute.manager [req-5e2a679d-ecd3-49cd-98e0-dcebbf9ef5ab req-b03fd931-253a-44c3-b8dd-7937166375f0 service nova] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Received unexpected event network-vif-plugged-6de8ad4d-1ee6-4190-bcaa-941184f740e1 for instance with vm_state building and task_state spawning. [ 670.800565] env[69367]: DEBUG nova.compute.manager [req-5e2a679d-ecd3-49cd-98e0-dcebbf9ef5ab req-b03fd931-253a-44c3-b8dd-7937166375f0 service nova] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Received event network-changed-6de8ad4d-1ee6-4190-bcaa-941184f740e1 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 670.800811] env[69367]: DEBUG nova.compute.manager [req-5e2a679d-ecd3-49cd-98e0-dcebbf9ef5ab req-b03fd931-253a-44c3-b8dd-7937166375f0 service nova] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Refreshing instance network info cache due to event network-changed-6de8ad4d-1ee6-4190-bcaa-941184f740e1. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 670.801247] env[69367]: DEBUG oslo_concurrency.lockutils [req-5e2a679d-ecd3-49cd-98e0-dcebbf9ef5ab req-b03fd931-253a-44c3-b8dd-7937166375f0 service nova] Acquiring lock "refresh_cache-ab365570-ac29-4094-be4c-d49563a465c8" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.951238] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Task: {'id': task-4233811, 'name': ReconfigVM_Task, 'duration_secs': 0.912443} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.952880] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Reconfigured VM instance instance-00000013 to attach disk [datastore2] ba4d981a-19f7-41ef-b7d1-a3f3830fe725/ba4d981a-19f7-41ef-b7d1-a3f3830fe725.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 670.953432] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7751d44f-33f3-4f0c-a84e-4fc78b464197 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.962138] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Waiting for the task: (returnval){ [ 670.962138] env[69367]: value = "task-4233813" [ 670.962138] env[69367]: _type = "Task" [ 670.962138] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.975662] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Task: {'id': task-4233813, 'name': Rename_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.143658] env[69367]: DEBUG nova.compute.manager [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Start spawning the instance on the hypervisor. 
{{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 671.195694] env[69367]: DEBUG nova.virt.hardware [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 671.195967] env[69367]: DEBUG nova.virt.hardware [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 671.199547] env[69367]: DEBUG nova.virt.hardware [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 671.200071] env[69367]: DEBUG nova.virt.hardware [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 671.200252] env[69367]: DEBUG nova.virt.hardware [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 671.200408] env[69367]: DEBUG nova.virt.hardware [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 671.200908] env[69367]: DEBUG nova.virt.hardware [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 671.201269] env[69367]: DEBUG nova.virt.hardware [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 671.202315] env[69367]: DEBUG nova.virt.hardware [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d 
tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 671.203330] env[69367]: DEBUG nova.virt.hardware [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 671.203680] env[69367]: DEBUG nova.virt.hardware [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 671.205209] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a797cea-2d54-45e3-993e-f616dd3a8d50 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.220715] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a8b8bf-c51a-40bb-a2a8-6973437904b5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.238200] env[69367]: DEBUG oslo_concurrency.lockutils [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.130s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 671.240120] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 27.321s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 671.240347] env[69367]: DEBUG nova.objects.instance [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69367) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 671.280040] env[69367]: INFO nova.scheduler.client.report [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Deleted allocations for instance 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce [ 671.292283] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Task: {'id': task-4233812, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.428206] env[69367]: DEBUG nova.network.neutron [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Updating instance_info_cache with network_info: [{"id": "6de8ad4d-1ee6-4190-bcaa-941184f740e1", "address": "fa:16:3e:95:49:08", "network": {"id": "e89e8083-43a5-4e6e-aa58-98ecffb55d24", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-76221324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c8ac08a704e476fbe794f66f61e27a5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6de8ad4d-1e", "ovs_interfaceid": "6de8ad4d-1ee6-4190-bcaa-941184f740e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.480025] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Task: {'id': task-4233813, 'name': Rename_Task, 'duration_secs': 0.286994} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.480025] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 671.480025] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-204acd89-fd5e-4222-9401-067e3a89f3c3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.489780] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Waiting for the task: (returnval){ [ 671.489780] env[69367]: value = "task-4233814" [ 671.489780] env[69367]: _type = "Task" [ 671.489780] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.499599] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Task: {'id': task-4233814, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.663130] env[69367]: DEBUG nova.network.neutron [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Successfully updated port: a1308824-de04-4736-b17a-57bf0eae53ff {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 671.786298] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Task: {'id': task-4233812, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.678193} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.787072] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] fa4a5dbc-b885-4439-8520-0bfff38438b3/fa4a5dbc-b885-4439-8520-0bfff38438b3.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 671.787272] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 671.787539] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-73057494-93de-4f64-85a7-82c9972db755 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.793256] env[69367]: DEBUG oslo_concurrency.lockutils [None req-da89b7a9-d8ce-4d71-a741-e22d0c2a0166 tempest-ImagesNegativeTestJSON-1017324503 tempest-ImagesNegativeTestJSON-1017324503-project-member] Lock "92bdb1b1-d8ab-46b2-9037-ee8fea4642ce" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.522s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 671.798292] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Waiting for the task: (returnval){ [ 671.798292] env[69367]: value = "task-4233815" [ 671.798292] env[69367]: _type = "Task" [ 671.798292] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.810332] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Task: {'id': task-4233815, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.933316] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Releasing lock "refresh_cache-ab365570-ac29-4094-be4c-d49563a465c8" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.934194] env[69367]: DEBUG nova.compute.manager [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Instance network_info: |[{"id": "6de8ad4d-1ee6-4190-bcaa-941184f740e1", "address": "fa:16:3e:95:49:08", "network": {"id": "e89e8083-43a5-4e6e-aa58-98ecffb55d24", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-76221324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c8ac08a704e476fbe794f66f61e27a5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6de8ad4d-1e", "ovs_interfaceid": "6de8ad4d-1ee6-4190-bcaa-941184f740e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 671.934749] env[69367]: DEBUG oslo_concurrency.lockutils [req-5e2a679d-ecd3-49cd-98e0-dcebbf9ef5ab req-b03fd931-253a-44c3-b8dd-7937166375f0 service nova] Acquired lock "refresh_cache-ab365570-ac29-4094-be4c-d49563a465c8" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.934945] env[69367]: DEBUG nova.network.neutron [req-5e2a679d-ecd3-49cd-98e0-dcebbf9ef5ab req-b03fd931-253a-44c3-b8dd-7937166375f0 service nova] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Refreshing network info cache for port 6de8ad4d-1ee6-4190-bcaa-941184f740e1 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 671.940052] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:49:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d4ef133-b6f3-41d1-add4-92a1482195cf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6de8ad4d-1ee6-4190-bcaa-941184f740e1', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 671.948568] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 
tempest-MultipleCreateTestJSON-1024910139-project-member] Creating folder: Project (9c8ac08a704e476fbe794f66f61e27a5). Parent ref: group-v837645. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 671.952264] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a963061e-a7d8-45f0-ba8c-aa792026e708 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.966861] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Created folder: Project (9c8ac08a704e476fbe794f66f61e27a5) in parent group-v837645. [ 671.967133] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Creating folder: Instances. Parent ref: group-v837686. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 671.967500] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c0931128-5f89-4ba8-b087-4e0f6b47e882 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.980279] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Created folder: Instances in parent group-v837686. [ 671.980624] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 671.980624] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 671.980756] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-53bf1ca9-4ccc-4519-b240-82e31c5df67b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.012254] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Task: {'id': task-4233814, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.012254] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 672.012254] env[69367]: value = "task-4233818" [ 672.012254] env[69367]: _type = "Task" [ 672.012254] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.166508] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "refresh_cache-c17525ee-d038-4c81-932b-ed74a6de6cb5" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.167224] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquired lock "refresh_cache-c17525ee-d038-4c81-932b-ed74a6de6cb5" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 672.167552] env[69367]: DEBUG nova.network.neutron [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 672.252552] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9bbf173d-3433-4657-92c2-dbef79888703 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 672.254482] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.539s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 672.258315] env[69367]: INFO nova.compute.claims [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 672.314198] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Task: {'id': task-4233815, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071437} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.317015] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 672.317962] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73d7f55f-66e7-4a5c-a6fa-1c19169c7059 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.345793] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Reconfiguring VM instance instance-00000014 to attach disk [datastore2] fa4a5dbc-b885-4439-8520-0bfff38438b3/fa4a5dbc-b885-4439-8520-0bfff38438b3.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 672.347702] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9140ba03-fefd-4ef4-8f93-2a0ba35c9737 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.374181] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Waiting for the task: (returnval){ [ 672.374181] env[69367]: value = "task-4233819" [ 672.374181] env[69367]: _type = "Task" [ 672.374181] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.386956] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Task: {'id': task-4233819, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.512260] env[69367]: DEBUG oslo_vmware.api [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Task: {'id': task-4233814, 'name': PowerOnVM_Task, 'duration_secs': 0.797813} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.512705] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 672.513957] env[69367]: INFO nova.compute.manager [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Took 15.79 seconds to spawn the instance on the hypervisor. 
[ 672.513957] env[69367]: DEBUG nova.compute.manager [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 672.519017] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3abb4cde-0bf4-4c50-b170-99f8d14361e7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.526750] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233818, 'name': CreateVM_Task, 'duration_secs': 0.434816} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.528340] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 672.540039] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.540039] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 672.540039] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 672.540039] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19413800-3555-41ea-8a56-9d496b2d98e5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.546750] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 672.546750] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52b0e7d3-6106-02e9-2796-1df8f1c0f451" [ 672.546750] env[69367]: _type = "Task" [ 672.546750] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.560714] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52b0e7d3-6106-02e9-2796-1df8f1c0f451, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.673288] env[69367]: DEBUG nova.network.neutron [req-b328ad72-2082-4bbb-8171-d6aff0f51a7d req-dd79e77a-0bac-41cf-9b83-1a5b416e07c7 service nova] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Updated VIF entry in instance network info cache for port b9b8324a-008d-47c5-a1e4-571d6275a798. {{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 672.676652] env[69367]: DEBUG nova.network.neutron [req-b328ad72-2082-4bbb-8171-d6aff0f51a7d req-dd79e77a-0bac-41cf-9b83-1a5b416e07c7 service nova] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Updating instance_info_cache with network_info: [{"id": "b9b8324a-008d-47c5-a1e4-571d6275a798", "address": "fa:16:3e:57:23:3b", "network": {"id": "e960e682-22ec-43d9-b511-c1b4f0dd64a4", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1877720464-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eaf9cd789bbf45df90fb39796f90f041", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2bf99f85-3a5c-47c6-a603-e215be6ab0bd", "external-id": "nsx-vlan-transportzone-855", "segmentation_id": 855, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9b8324a-00", "ovs_interfaceid": "b9b8324a-008d-47c5-a1e4-571d6275a798", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.730284] env[69367]: DEBUG nova.network.neutron [req-5e2a679d-ecd3-49cd-98e0-dcebbf9ef5ab req-b03fd931-253a-44c3-b8dd-7937166375f0 service nova] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Updated VIF entry in instance network info cache for port 6de8ad4d-1ee6-4190-bcaa-941184f740e1. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 672.730284] env[69367]: DEBUG nova.network.neutron [req-5e2a679d-ecd3-49cd-98e0-dcebbf9ef5ab req-b03fd931-253a-44c3-b8dd-7937166375f0 service nova] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Updating instance_info_cache with network_info: [{"id": "6de8ad4d-1ee6-4190-bcaa-941184f740e1", "address": "fa:16:3e:95:49:08", "network": {"id": "e89e8083-43a5-4e6e-aa58-98ecffb55d24", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-76221324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c8ac08a704e476fbe794f66f61e27a5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6de8ad4d-1e", "ovs_interfaceid": "6de8ad4d-1ee6-4190-bcaa-941184f740e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.838908] env[69367]: DEBUG nova.network.neutron [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Successfully created port: 495ec91b-986e-4b74-820f-28ae7f03a86a {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 672.887484] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Task: {'id': task-4233819, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.890459] env[69367]: DEBUG nova.network.neutron [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 673.054989] env[69367]: INFO nova.compute.manager [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Took 36.15 seconds to build instance. [ 673.067068] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52b0e7d3-6106-02e9-2796-1df8f1c0f451, 'name': SearchDatastore_Task, 'duration_secs': 0.021811} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.068112] env[69367]: DEBUG nova.network.neutron [req-1dced25c-9266-4f6b-be4b-85fab9b490af req-43e0e6a8-10d0-4bdd-b1aa-490b5f864d33 service nova] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Updated VIF entry in instance network info cache for port fa738fa1-0be4-4506-8e42-73671661dee1. {{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 673.068446] env[69367]: DEBUG nova.network.neutron [req-1dced25c-9266-4f6b-be4b-85fab9b490af req-43e0e6a8-10d0-4bdd-b1aa-490b5f864d33 service nova] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Updating instance_info_cache with network_info: [{"id": "fa738fa1-0be4-4506-8e42-73671661dee1", "address": "fa:16:3e:c7:7c:7c", "network": {"id": "0b0a82c2-1a70-4174-a75e-5863d3505b2c", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1091682254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.159", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26a89ab4163e4b9a801dcbf11c953cf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa738fa1-0b", "ovs_interfaceid": "fa738fa1-0be4-4506-8e42-73671661dee1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.070354] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.070484] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 673.070718] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.070883] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquired lock 
"[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.071080] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 673.071586] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-648968dd-8575-4ff0-af5f-b6d9174c0d78 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.096570] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 673.096765] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 673.097548] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83d80d84-501e-4b20-88c2-a314f7df51d3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.106046] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 673.106046] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52e7d283-a33c-8f5e-048d-d98a821b7bbd" [ 673.106046] env[69367]: _type = "Task" [ 673.106046] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.120846] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52e7d283-a33c-8f5e-048d-d98a821b7bbd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.180295] env[69367]: DEBUG oslo_concurrency.lockutils [req-b328ad72-2082-4bbb-8171-d6aff0f51a7d req-dd79e77a-0bac-41cf-9b83-1a5b416e07c7 service nova] Releasing lock "refresh_cache-fa4a5dbc-b885-4439-8520-0bfff38438b3" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.231145] env[69367]: DEBUG oslo_concurrency.lockutils [req-5e2a679d-ecd3-49cd-98e0-dcebbf9ef5ab req-b03fd931-253a-44c3-b8dd-7937166375f0 service nova] Releasing lock "refresh_cache-ab365570-ac29-4094-be4c-d49563a465c8" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.386319] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Task: {'id': task-4233819, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.406136] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquiring lock "788b843c-1496-4562-a761-44f3e1ce6da2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.406136] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Lock "788b843c-1496-4562-a761-44f3e1ce6da2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.443187] env[69367]: DEBUG nova.compute.manager [req-5ea1c1f8-2b54-47b7-a755-c756b0a233f2 req-f99f9d9c-7dec-4724-8579-18851c593a94 service nova] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Received event network-vif-plugged-a1308824-de04-4736-b17a-57bf0eae53ff {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 673.443408] env[69367]: DEBUG oslo_concurrency.lockutils [req-5ea1c1f8-2b54-47b7-a755-c756b0a233f2 req-f99f9d9c-7dec-4724-8579-18851c593a94 service nova] Acquiring lock "c17525ee-d038-4c81-932b-ed74a6de6cb5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.443626] env[69367]: DEBUG oslo_concurrency.lockutils [req-5ea1c1f8-2b54-47b7-a755-c756b0a233f2 req-f99f9d9c-7dec-4724-8579-18851c593a94 service nova] Lock "c17525ee-d038-4c81-932b-ed74a6de6cb5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.443804] env[69367]: DEBUG oslo_concurrency.lockutils [req-5ea1c1f8-2b54-47b7-a755-c756b0a233f2 req-f99f9d9c-7dec-4724-8579-18851c593a94 service nova] Lock "c17525ee-d038-4c81-932b-ed74a6de6cb5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 673.444066] env[69367]: DEBUG nova.compute.manager [req-5ea1c1f8-2b54-47b7-a755-c756b0a233f2 req-f99f9d9c-7dec-4724-8579-18851c593a94 service nova] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] No waiting events found dispatching network-vif-plugged-a1308824-de04-4736-b17a-57bf0eae53ff {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 673.444236] env[69367]: WARNING nova.compute.manager [req-5ea1c1f8-2b54-47b7-a755-c756b0a233f2 req-f99f9d9c-7dec-4724-8579-18851c593a94 service nova] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Received unexpected event network-vif-plugged-a1308824-de04-4736-b17a-57bf0eae53ff for instance with vm_state building and task_state spawning. [ 673.444326] env[69367]: DEBUG nova.compute.manager [req-5ea1c1f8-2b54-47b7-a755-c756b0a233f2 req-f99f9d9c-7dec-4724-8579-18851c593a94 service nova] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Received event network-changed-a1308824-de04-4736-b17a-57bf0eae53ff {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 673.444481] env[69367]: DEBUG nova.compute.manager [req-5ea1c1f8-2b54-47b7-a755-c756b0a233f2 req-f99f9d9c-7dec-4724-8579-18851c593a94 service nova] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Refreshing instance network info cache due to event network-changed-a1308824-de04-4736-b17a-57bf0eae53ff. {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 673.444650] env[69367]: DEBUG oslo_concurrency.lockutils [req-5ea1c1f8-2b54-47b7-a755-c756b0a233f2 req-f99f9d9c-7dec-4724-8579-18851c593a94 service nova] Acquiring lock "refresh_cache-c17525ee-d038-4c81-932b-ed74a6de6cb5" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.557318] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fc0b5fcd-3d01-44ca-949e-8111b34eda65 tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Lock "ba4d981a-19f7-41ef-b7d1-a3f3830fe725" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.335s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 673.573342] env[69367]: DEBUG oslo_concurrency.lockutils [req-1dced25c-9266-4f6b-be4b-85fab9b490af req-43e0e6a8-10d0-4bdd-b1aa-490b5f864d33 service nova] Releasing lock "refresh_cache-8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.628547] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52e7d283-a33c-8f5e-048d-d98a821b7bbd, 'name': SearchDatastore_Task, 'duration_secs': 0.015305} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.633830] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2b6dd8f-7826-4958-aaea-23f805cea46e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.643725] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 673.643725] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52aa002b-edde-0f70-ac05-52a3df8ad98b" [ 673.643725] env[69367]: _type = "Task" [ 673.643725] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.663805] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52aa002b-edde-0f70-ac05-52a3df8ad98b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.694778] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f9f8e96-4e5b-4714-baff-1dd6499f78cc {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.703230] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db959d29-356b-456f-bf45-a81c84e1e405 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.738866] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a75a2ce-2c86-465a-8cde-94e29b557bfe {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.744871] env[69367]: DEBUG nova.network.neutron [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Updating instance_info_cache with network_info: [{"id": "a1308824-de04-4736-b17a-57bf0eae53ff", "address": "fa:16:3e:75:f5:24", "network": {"id": "e89e8083-43a5-4e6e-aa58-98ecffb55d24", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-76221324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c8ac08a704e476fbe794f66f61e27a5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1308824-de", "ovs_interfaceid": "a1308824-de04-4736-b17a-57bf0eae53ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.751870] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e432e4-a9a3-457a-94ce-4df099a9ef87 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.767308] env[69367]: DEBUG nova.compute.provider_tree [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 673.886763] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Task: {'id': task-4233819, 'name': ReconfigVM_Task, 'duration_secs': 1.39643} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.887786] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Reconfigured VM instance instance-00000014 to attach disk [datastore2] fa4a5dbc-b885-4439-8520-0bfff38438b3/fa4a5dbc-b885-4439-8520-0bfff38438b3.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 673.888488] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-16093781-3620-4dc3-b93a-771fe934c2d2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.895765] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Waiting for the task: (returnval){ [ 673.895765] env[69367]: value = "task-4233820" [ 673.895765] env[69367]: _type = "Task" [ 673.895765] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.911929] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Task: {'id': task-4233820, 'name': Rename_Task} progress is 6%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.064072] env[69367]: DEBUG nova.compute.manager [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 674.154903] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52aa002b-edde-0f70-ac05-52a3df8ad98b, 'name': SearchDatastore_Task, 'duration_secs': 0.034122} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.155358] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 674.155962] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] ab365570-ac29-4094-be4c-d49563a465c8/ab365570-ac29-4094-be4c-d49563a465c8.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 674.155962] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a9ce32c7-e073-42bf-9588-08244c37490b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.164493] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 674.164493] env[69367]: value = "task-4233821" [ 674.164493] env[69367]: _type = "Task" [ 674.164493] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.176889] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233821, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.247966] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Releasing lock "refresh_cache-c17525ee-d038-4c81-932b-ed74a6de6cb5" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 674.248411] env[69367]: DEBUG nova.compute.manager [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Instance network_info: |[{"id": "a1308824-de04-4736-b17a-57bf0eae53ff", "address": "fa:16:3e:75:f5:24", "network": {"id": "e89e8083-43a5-4e6e-aa58-98ecffb55d24", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-76221324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c8ac08a704e476fbe794f66f61e27a5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1308824-de", "ovs_interfaceid": "a1308824-de04-4736-b17a-57bf0eae53ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 674.248750] env[69367]: DEBUG oslo_concurrency.lockutils [req-5ea1c1f8-2b54-47b7-a755-c756b0a233f2 req-f99f9d9c-7dec-4724-8579-18851c593a94 service nova] Acquired lock "refresh_cache-c17525ee-d038-4c81-932b-ed74a6de6cb5" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 674.248953] env[69367]: DEBUG nova.network.neutron [req-5ea1c1f8-2b54-47b7-a755-c756b0a233f2 req-f99f9d9c-7dec-4724-8579-18851c593a94 service nova] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Refreshing network info cache for port a1308824-de04-4736-b17a-57bf0eae53ff {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 674.256765] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:f5:24', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d4ef133-b6f3-41d1-add4-92a1482195cf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a1308824-de04-4736-b17a-57bf0eae53ff', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 674.268093] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 
tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 674.269199] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 674.270198] env[69367]: DEBUG nova.scheduler.client.report [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 674.274721] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a03f5349-0fcf-4920-ac7e-df4826bb50fa {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.302019] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 674.302019] env[69367]: value = "task-4233822" [ 674.302019] env[69367]: _type = "Task" [ 674.302019] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.311302] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233822, 'name': CreateVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.415779] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Task: {'id': task-4233820, 'name': Rename_Task, 'duration_secs': 0.202501} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.416358] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 674.416476] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3fd4b92e-dbb1-4e70-b0be-2461ca5cb6c2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.427034] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Waiting for the task: (returnval){ [ 674.427034] env[69367]: value = "task-4233823" [ 674.427034] env[69367]: _type = "Task" [ 674.427034] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.441218] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Task: {'id': task-4233823, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.617782] env[69367]: DEBUG oslo_concurrency.lockutils [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 674.683716] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233821, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.795042] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.540s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 674.795865] env[69367]: DEBUG nova.compute.manager [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Start building networks asynchronously for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 674.801132] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.966s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 674.804174] env[69367]: INFO nova.compute.claims [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 674.822250] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233822, 'name': CreateVM_Task, 'duration_secs': 0.465163} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.822250] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 674.822250] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.822250] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 674.822250] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 674.822786] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6107c2b-cf3d-4979-950b-596f2204bd56 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.835588] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 674.835588] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52604ebd-778a-aaa7-8063-6bd5b3630258" [ 674.835588] env[69367]: _type = "Task" [ 674.835588] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.854056] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52604ebd-778a-aaa7-8063-6bd5b3630258, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.941979] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Task: {'id': task-4233823, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.177776] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233821, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.7267} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.178043] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] ab365570-ac29-4094-be4c-d49563a465c8/ab365570-ac29-4094-be4c-d49563a465c8.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 675.178281] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 675.178539] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-94c09122-d975-4840-b552-c232636563e6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.187819] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 675.187819] env[69367]: value = "task-4233824" [ 675.187819] env[69367]: _type = "Task" [ 675.187819] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.202451] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233824, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.310443] env[69367]: DEBUG nova.compute.utils [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 675.311917] env[69367]: DEBUG nova.compute.manager [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Allocating IP information in the background. {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 675.316868] env[69367]: DEBUG nova.network.neutron [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 675.347781] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52604ebd-778a-aaa7-8063-6bd5b3630258, 'name': SearchDatastore_Task, 'duration_secs': 0.042568} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.348127] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 675.348366] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 675.348601] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.348747] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 675.348930] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 675.349738] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c4d71f9-918a-4188-a0a3-4e1bbb2b55e3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.365639] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 675.365848] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 675.368199] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4c9b326-bc6b-42f0-aa14-6f6e4556310a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.378112] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 675.378112] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]525c2687-7a37-3b9c-726a-a631239045c8" [ 675.378112] env[69367]: _type = "Task" [ 675.378112] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.389404] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]525c2687-7a37-3b9c-726a-a631239045c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.440810] env[69367]: DEBUG oslo_vmware.api [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Task: {'id': task-4233823, 'name': PowerOnVM_Task, 'duration_secs': 0.697399} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.441188] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 675.442349] env[69367]: INFO nova.compute.manager [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Took 16.20 seconds to spawn the instance on the hypervisor. 
[ 675.442571] env[69367]: DEBUG nova.compute.manager [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 675.445934] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e43269e-743e-4832-913e-ac0ecf6a8bd2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.517485] env[69367]: DEBUG nova.policy [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58ecc6f48c8e44328ea397d92e025c4e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd73aa45fc35b44c6a8a2dd8ef127b974', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 675.708440] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233824, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.109133} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.708776] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 675.709888] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d9dfba-c529-4831-a652-c89bd6dc3841 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.739487] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] ab365570-ac29-4094-be4c-d49563a465c8/ab365570-ac29-4094-be4c-d49563a465c8.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 675.740503] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-467f3f9c-b515-42e7-b3f7-12784f2d32c1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.766075] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 675.766075] env[69367]: value = "task-4233825" [ 675.766075] env[69367]: _type = "Task" [ 675.766075] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.776498] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233825, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.823563] env[69367]: DEBUG nova.compute.manager [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Start building block device mappings for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 675.893058] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]525c2687-7a37-3b9c-726a-a631239045c8, 'name': SearchDatastore_Task, 'duration_secs': 0.045877} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.895430] env[69367]: DEBUG nova.network.neutron [req-5ea1c1f8-2b54-47b7-a755-c756b0a233f2 req-f99f9d9c-7dec-4724-8579-18851c593a94 service nova] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Updated VIF entry in instance network info cache for port a1308824-de04-4736-b17a-57bf0eae53ff. {{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 675.895430] env[69367]: DEBUG nova.network.neutron [req-5ea1c1f8-2b54-47b7-a755-c756b0a233f2 req-f99f9d9c-7dec-4724-8579-18851c593a94 service nova] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Updating instance_info_cache with network_info: [{"id": "a1308824-de04-4736-b17a-57bf0eae53ff", "address": "fa:16:3e:75:f5:24", "network": {"id": "e89e8083-43a5-4e6e-aa58-98ecffb55d24", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-76221324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c8ac08a704e476fbe794f66f61e27a5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1308824-de", "ovs_interfaceid": "a1308824-de04-4736-b17a-57bf0eae53ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.896659] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-548b4c54-26dc-4848-bdd2-008703e6dc85 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.908410] env[69367]: DEBUG oslo_vmware.api [None 
req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 675.908410] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]5249e7c3-fe38-4ea3-5a36-74f285db86dd" [ 675.908410] env[69367]: _type = "Task" [ 675.908410] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.938974] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5249e7c3-fe38-4ea3-5a36-74f285db86dd, 'name': SearchDatastore_Task, 'duration_secs': 0.023481} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.938974] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 675.939579] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] c17525ee-d038-4c81-932b-ed74a6de6cb5/c17525ee-d038-4c81-932b-ed74a6de6cb5.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 675.940175] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6ecf70e5-75bb-43ce-bd0b-36a340f6d8ed {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.958759] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 675.958759] env[69367]: value = "task-4233826" [ 675.958759] env[69367]: _type = "Task" [ 675.958759] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.974135] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233826, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.974753] env[69367]: INFO nova.compute.manager [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Took 38.39 seconds to build instance. 
[ 676.102243] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Acquiring lock "3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 676.102654] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Lock "3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 676.104029] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Acquiring lock "3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 676.104029] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Lock "3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 676.104029] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Lock "3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.105545] env[69367]: INFO nova.compute.manager [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Terminating instance [ 676.284889] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233825, 'name': ReconfigVM_Task, 'duration_secs': 0.327318} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.284889] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Reconfigured VM instance instance-00000015 to attach disk [datastore1] ab365570-ac29-4094-be4c-d49563a465c8/ab365570-ac29-4094-be4c-d49563a465c8.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 676.284889] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f777439e-b6bf-4bff-9d7c-35cc9801f040 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.293074] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 676.293074] env[69367]: value = "task-4233827" [ 676.293074] env[69367]: _type = "Task" [ 676.293074] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.303202] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233827, 'name': Rename_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.310240] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aed77d0-6040-4747-9f61-4b3046127250 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.320018] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c03fbc-a665-4dfe-9868-ba3a98547a02 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.362969] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af3ac08a-d338-4e33-874b-8301f6242444 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.372876] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45cc2ad2-0830-4e2a-a586-7280ff080709 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.391638] env[69367]: DEBUG nova.compute.provider_tree [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 676.396950] env[69367]: DEBUG oslo_concurrency.lockutils [req-5ea1c1f8-2b54-47b7-a755-c756b0a233f2 req-f99f9d9c-7dec-4724-8579-18851c593a94 service nova] Releasing lock "refresh_cache-c17525ee-d038-4c81-932b-ed74a6de6cb5" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 676.470109] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233826, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.477205] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5942d654-cc5c-4cde-951d-9fa7311a189d tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Lock "fa4a5dbc-b885-4439-8520-0bfff38438b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.772s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.610883] env[69367]: DEBUG nova.compute.manager [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Start destroying the instance on the hypervisor. {{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 676.611141] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 676.612270] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f43f064e-dd2e-4e31-b18d-27640fb660a3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.630282] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 676.630599] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-65d35dc8-ce5a-4b7f-85e5-5b861eccbf5f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.640357] env[69367]: DEBUG oslo_vmware.api [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Waiting for the task: (returnval){ [ 676.640357] env[69367]: value = "task-4233828" [ 676.640357] env[69367]: _type = "Task" [ 676.640357] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.658660] env[69367]: DEBUG oslo_vmware.api [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Task: {'id': task-4233828, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.806641] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233827, 'name': Rename_Task, 'duration_secs': 0.163458} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.806984] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 676.807985] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c866a13d-f482-400c-a367-4404143ea56c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.817882] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 676.817882] env[69367]: value = "task-4233829" [ 676.817882] env[69367]: _type = "Task" [ 676.817882] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.829919] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233829, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.865398] env[69367]: DEBUG nova.compute.manager [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Start spawning the instance on the hypervisor. 
{{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 676.915827] env[69367]: DEBUG nova.virt.hardware [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 676.916466] env[69367]: DEBUG nova.virt.hardware [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 676.916776] env[69367]: DEBUG nova.virt.hardware [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 676.917635] env[69367]: DEBUG nova.virt.hardware [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 676.917964] env[69367]: DEBUG nova.virt.hardware [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 676.918770] env[69367]: DEBUG nova.virt.hardware [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 676.919072] env[69367]: DEBUG nova.virt.hardware [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 676.920163] env[69367]: DEBUG nova.virt.hardware [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 676.920163] 
env[69367]: DEBUG nova.virt.hardware [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 676.920163] env[69367]: DEBUG nova.virt.hardware [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 676.920334] env[69367]: DEBUG nova.virt.hardware [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 676.921274] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ced81a5-693e-45da-a2ba-de4763e11d30 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.930184] env[69367]: ERROR nova.scheduler.client.report [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] [req-44f825cd-e579-46bc-b3cf-3140bc54781b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-44f825cd-e579-46bc-b3cf-3140bc54781b"}]} [ 676.930559] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.130s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.931155] env[69367]: ERROR nova.compute.manager [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 676.931155] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] Traceback (most recent call last): [ 676.931155] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 676.931155] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] yield [ 676.931155] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 676.931155] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] self.set_inventory_for_provider( [ 676.931155] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 676.931155] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 676.931610] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-44f825cd-e579-46bc-b3cf-3140bc54781b"}]} [ 676.931610] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] [ 676.931610] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] During handling of the above exception, another exception occurred: [ 676.931610] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] [ 676.931610] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] Traceback (most recent call last): [ 676.931610] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 676.931610] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] with self.rt.instance_claim(context, instance, node, allocs, [ 676.931610] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 676.931610] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] return f(*args, **kwargs) [ 676.932149] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 676.932149] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] self._update(elevated, cn) [ 676.932149] env[69367]: ERROR nova.compute.manager [instance: 
8001cca4-9b9f-4425-b6e4-d27866395886] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 676.932149] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] self._update_to_placement(context, compute_node, startup) [ 676.932149] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 676.932149] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 676.932149] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 676.932149] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] return attempt.get(self._wrap_exception) [ 676.932149] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 676.932149] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] six.reraise(self.value[0], self.value[1], self.value[2]) [ 676.932149] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 676.932149] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] raise value [ 676.932149] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 676.932786] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 676.932786] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 676.932786] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] self.reportclient.update_from_provider_tree( [ 676.932786] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 676.932786] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] with catch_all(pd.uuid): [ 676.932786] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 676.932786] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] self.gen.throw(typ, value, traceback) [ 676.932786] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 676.932786] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] raise exception.ResourceProviderSyncFailed() [ 676.932786] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 676.932786] env[69367]: ERROR nova.compute.manager [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] [ 676.933757] env[69367]: DEBUG nova.compute.utils [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 676.934177] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe9db29-9464-4926-ac2d-0ae08fec0f47 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.938333] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.877s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 676.940251] env[69367]: INFO nova.compute.claims [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 676.945887] env[69367]: DEBUG nova.compute.manager [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] Build of instance 8001cca4-9b9f-4425-b6e4-d27866395886 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 676.945887] env[69367]: DEBUG nova.compute.manager [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 676.945887] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] Acquiring lock "refresh_cache-8001cca4-9b9f-4425-b6e4-d27866395886" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.945887] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] Acquired lock "refresh_cache-8001cca4-9b9f-4425-b6e4-d27866395886" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 676.946099] env[69367]: DEBUG nova.network.neutron [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 676.974410] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233826, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.952794} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.974715] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] c17525ee-d038-4c81-932b-ed74a6de6cb5/c17525ee-d038-4c81-932b-ed74a6de6cb5.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 676.974919] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 676.975687] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6a09c271-c7c5-448d-8c5c-813839058789 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.979941] env[69367]: DEBUG nova.compute.manager [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 676.984709] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 676.984709] env[69367]: value = "task-4233830" [ 676.984709] env[69367]: _type = "Task" [ 676.984709] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.996628] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233830, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.158015] env[69367]: DEBUG oslo_vmware.api [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Task: {'id': task-4233828, 'name': PowerOffVM_Task, 'duration_secs': 0.449392} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.158015] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 677.158015] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 677.158015] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a5920cab-9b2f-4250-be4a-f73f86c03027 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.252955] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 677.253310] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 677.253600] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Deleting the datastore file [datastore2] 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 677.253963] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-13a8e532-4a89-4a2b-bfd9-e03f80da0da8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.263918] env[69367]: DEBUG oslo_vmware.api [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Waiting for the task: (returnval){ [ 677.263918] env[69367]: value = "task-4233832" [ 677.263918] env[69367]: _type = "Task" [ 677.263918] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.279626] env[69367]: DEBUG oslo_vmware.api [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Task: {'id': task-4233832, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.335311] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233829, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.507540] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233830, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105519} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.507540] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 677.510554] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389c918c-d234-48c2-9ddd-c77d6f65a588 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.537553] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] c17525ee-d038-4c81-932b-ed74a6de6cb5/c17525ee-d038-4c81-932b-ed74a6de6cb5.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 677.538720] env[69367]: DEBUG nova.network.neutron [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 677.541920] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.542583] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-37bf55a5-0d72-4854-a16c-087674019df8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.568326] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 677.568326] env[69367]: value = "task-4233833" [ 677.568326] env[69367]: _type = "Task" [ 677.568326] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.580599] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233833, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.591731] env[69367]: DEBUG nova.network.neutron [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Successfully created port: 5401116f-daf2-4db0-b052-7bd1adb63cc1 {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 677.679397] env[69367]: DEBUG nova.network.neutron [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Successfully updated port: 495ec91b-986e-4b74-820f-28ae7f03a86a {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 677.780030] env[69367]: DEBUG oslo_vmware.api [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Task: {'id': task-4233832, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.454041} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.781199] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 677.781478] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 677.781673] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 677.781871] env[69367]: INFO nova.compute.manager [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Took 1.17 seconds to destroy the instance on the hypervisor. [ 677.782221] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 677.786822] env[69367]: DEBUG nova.compute.manager [-] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 677.786822] env[69367]: DEBUG nova.network.neutron [-] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 677.787684] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] Acquiring lock "250a50bf-c4b0-4997-9ce5-6dbeb617e9ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.787930] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] Lock "250a50bf-c4b0-4997-9ce5-6dbeb617e9ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 677.829578] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233829, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.977254] env[69367]: DEBUG nova.scheduler.client.report [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 678.005482] env[69367]: DEBUG nova.scheduler.client.report [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 678.005728] env[69367]: DEBUG nova.compute.provider_tree [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 678.030029] env[69367]: DEBUG nova.network.neutron [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.032579] env[69367]: DEBUG nova.scheduler.client.report [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 678.066461] env[69367]: DEBUG nova.scheduler.client.report [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 678.086303] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233833, 
'name': ReconfigVM_Task, 'duration_secs': 0.433878} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.086303] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Reconfigured VM instance instance-00000016 to attach disk [datastore1] c17525ee-d038-4c81-932b-ed74a6de6cb5/c17525ee-d038-4c81-932b-ed74a6de6cb5.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 678.086303] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-324b3807-0461-4821-aab1-7428ee5e1a09 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.101479] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 678.101479] env[69367]: value = "task-4233834" [ 678.101479] env[69367]: _type = "Task" [ 678.101479] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.111740] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233834, 'name': Rename_Task} progress is 6%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.185185] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Acquiring lock "refresh_cache-92c27615-d377-492f-a9db-ff45b2e71537" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.185185] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Acquired lock "refresh_cache-92c27615-d377-492f-a9db-ff45b2e71537" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 678.185185] env[69367]: DEBUG nova.network.neutron [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 678.333629] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233829, 'name': PowerOnVM_Task, 'duration_secs': 1.342532} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.333964] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 678.334224] env[69367]: INFO nova.compute.manager [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Took 16.53 seconds to spawn the instance on the hypervisor. [ 678.334433] env[69367]: DEBUG nova.compute.manager [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 678.335536] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c2a9bc-9e84-4485-8e27-95e361483027 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.532588] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] Releasing lock "refresh_cache-8001cca4-9b9f-4425-b6e4-d27866395886" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 678.532588] env[69367]: DEBUG nova.compute.manager [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 678.532588] env[69367]: DEBUG nova.compute.manager [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 678.532588] env[69367]: DEBUG nova.network.neutron [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 678.562594] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d76682-6f98-40c9-b748-b58c95beaf16 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.579090] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ff08bd-1ad5-4a96-a001-dd92a25e0f9b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.626991] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6456d90c-7bbf-44d0-ba88-0cd1c7386dfe {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.635113] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233834, 'name': Rename_Task} progress is 99%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.640818] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d4c0053-9acb-42a1-bc67-7804bf05ce63 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.656083] env[69367]: DEBUG nova.compute.provider_tree [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 678.695365] env[69367]: DEBUG nova.compute.manager [req-098e5815-a035-4649-812a-9ebca5fb0a12 req-d58c0fe7-62d3-4d0c-b667-f7fc7bf90b63 service nova] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Received event network-vif-plugged-495ec91b-986e-4b74-820f-28ae7f03a86a {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 678.695365] env[69367]: DEBUG oslo_concurrency.lockutils [req-098e5815-a035-4649-812a-9ebca5fb0a12 req-d58c0fe7-62d3-4d0c-b667-f7fc7bf90b63 service nova] Acquiring lock "92c27615-d377-492f-a9db-ff45b2e71537-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 678.695365] env[69367]: DEBUG oslo_concurrency.lockutils [req-098e5815-a035-4649-812a-9ebca5fb0a12 req-d58c0fe7-62d3-4d0c-b667-f7fc7bf90b63 service nova] Lock "92c27615-d377-492f-a9db-ff45b2e71537-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 678.695365] env[69367]: DEBUG oslo_concurrency.lockutils [req-098e5815-a035-4649-812a-9ebca5fb0a12 req-d58c0fe7-62d3-4d0c-b667-f7fc7bf90b63 service nova] Lock "92c27615-d377-492f-a9db-ff45b2e71537-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 678.695365] env[69367]: DEBUG nova.compute.manager [req-098e5815-a035-4649-812a-9ebca5fb0a12 req-d58c0fe7-62d3-4d0c-b667-f7fc7bf90b63 service nova] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] No waiting events found dispatching network-vif-plugged-495ec91b-986e-4b74-820f-28ae7f03a86a {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 678.695652] env[69367]: WARNING nova.compute.manager [req-098e5815-a035-4649-812a-9ebca5fb0a12 req-d58c0fe7-62d3-4d0c-b667-f7fc7bf90b63 service nova] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Received unexpected event network-vif-plugged-495ec91b-986e-4b74-820f-28ae7f03a86a for instance with vm_state building and task_state spawning. 
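[editor's note] The repeated 400 responses from placement in this section all trace back to the same payload problem: the compute node reports a DISK_GB inventory whose max_unit is 0, while the placement inventory schema requires max_unit to be an integer of at least 1 (the refresh at 678.005 shows placement still holding max_unit 1 for DISK_GB, whereas the ProviderTree update at 678.656 pushes max_unit 0). The snippet below is a minimal, self-contained sketch that reproduces that validation step locally; the schema fragment is copied from the error detail quoted in the log and the payload mirrors the DISK_GB entry from the ProviderTree update. It is illustrative only, not Nova's or placement's actual code path.

```python
# Minimal sketch: reproduce the placement-side validation failure seen above.
# The schema fragment mirrors the one quoted in the 400 error detail; the
# payload mirrors the DISK_GB entry from the ProviderTree update.
# Illustrative only -- this is not the placement service's actual code.
import jsonschema

INVENTORY_SCHEMA = {
    "type": "object",
    "properties": {
        "inventories": {
            "type": "object",
            "patternProperties": {
                "^[A-Z0-9_]+$": {
                    "type": "object",
                    "properties": {
                        "total": {"type": "integer"},
                        "max_unit": {
                            "type": "integer",
                            "maximum": 2147483647,
                            "minimum": 1,  # the constraint DISK_GB violates
                        },
                    },
                },
            },
        },
    },
}

payload = {
    "inventories": {
        "DISK_GB": {"total": 400, "max_unit": 0},  # max_unit 0, as in the log
    },
}

try:
    jsonschema.validate(payload, INVENTORY_SCHEMA)
except jsonschema.ValidationError as exc:
    # Prints the same complaint placement returns:
    # "0 is less than the minimum of 1"
    print(exc.message)
```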
[ 678.714091] env[69367]: DEBUG nova.network.neutron [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 678.730545] env[69367]: DEBUG nova.network.neutron [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 678.865183] env[69367]: INFO nova.compute.manager [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Took 40.62 seconds to build instance. [ 679.131770] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233834, 'name': Rename_Task} progress is 99%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.165454] env[69367]: DEBUG nova.network.neutron [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Updating instance_info_cache with network_info: [{"id": "495ec91b-986e-4b74-820f-28ae7f03a86a", "address": "fa:16:3e:6f:58:18", "network": {"id": "c3914068-abfe-4a12-9775-bc4851a19549", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-393561532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e332ecec6c1c43c18345d8a2761d98be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9079d3b9-5c2d-4ca1-8d2f-68ceb8ec8c98", "external-id": "nsx-vlan-transportzone-527", "segmentation_id": 527, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap495ec91b-98", "ovs_interfaceid": "495ec91b-986e-4b74-820f-28ae7f03a86a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.196570] env[69367]: ERROR nova.scheduler.client.report [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [req-def05eb0-bd14-4928-85c8-c69ae66cc6f4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-def05eb0-bd14-4928-85c8-c69ae66cc6f4"}]} [ 679.196991] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.259s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.197675] env[69367]: ERROR nova.compute.manager [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 679.197675] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] Traceback (most recent call last): [ 679.197675] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 679.197675] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] yield [ 679.197675] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 679.197675] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] self.set_inventory_for_provider( [ 679.197675] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 679.197675] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 679.198199] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-def05eb0-bd14-4928-85c8-c69ae66cc6f4"}]} [ 679.198199] env[69367]: ERROR 
nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] [ 679.198199] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] During handling of the above exception, another exception occurred: [ 679.198199] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] [ 679.198199] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] Traceback (most recent call last): [ 679.198199] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 679.198199] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] with self.rt.instance_claim(context, instance, node, allocs, [ 679.198199] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 679.198199] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] return f(*args, **kwargs) [ 679.198627] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 679.198627] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] self._update(elevated, cn) [ 679.198627] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 679.198627] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] self._update_to_placement(context, compute_node, startup) [ 679.198627] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 679.198627] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 679.198627] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 679.198627] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] return attempt.get(self._wrap_exception) [ 679.198627] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 679.198627] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] six.reraise(self.value[0], self.value[1], self.value[2]) [ 679.198627] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 679.198627] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] raise value [ 679.198627] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 679.199147] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 679.199147] env[69367]: ERROR 
nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 679.199147] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] self.reportclient.update_from_provider_tree( [ 679.199147] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 679.199147] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] with catch_all(pd.uuid): [ 679.199147] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 679.199147] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] self.gen.throw(typ, value, traceback) [ 679.199147] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 679.199147] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] raise exception.ResourceProviderSyncFailed() [ 679.199147] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 679.199147] env[69367]: ERROR nova.compute.manager [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] [ 679.199594] env[69367]: DEBUG nova.compute.utils [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 679.199594] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.164s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.199884] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.200335] env[69367]: INFO nova.compute.manager [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Successfully reverted task state from None on failure for instance. 
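[editor's note] Because the resource tracker calls _update_to_placement under the "compute_resources" lock and through a retrying wrapper, the invalid max_unit keeps every instance claim, usage update, and even the delete path above failing with ResourceProviderSyncFailed until the reported inventory becomes valid again. A defensive check like the following, run on an inventory dict of the shape shown in the ProviderTree updates before it is handed to placement, would surface the problem earlier with a clearer message. This is a hypothetical debugging sketch, not the fix Nova applies; only the field names are taken from the log output.

```python
# Hypothetical helper: sanity-check an inventory dict of the shape logged in
# the ProviderTree updates above before reporting it to placement.
# Field names match the log output; the function itself is illustrative only.
def check_inventory(inventory: dict) -> list[str]:
    """Return a list of problems that placement would reject with a 400."""
    problems = []
    for rc, fields in inventory.items():
        max_unit = fields.get("max_unit", 0)
        if not 1 <= max_unit <= 2147483647:
            problems.append(
                f"{rc}: max_unit={max_unit} outside the allowed range [1, 2147483647]"
            )
        if fields.get("min_unit", 1) > max(max_unit, 1):
            problems.append(f"{rc}: min_unit exceeds max_unit")
    return problems


# The DISK_GB entry reported at 678.656 would be flagged immediately:
reported = {
    "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1,
                "max_unit": 0, "step_size": 1, "allocation_ratio": 1.0},
}
for problem in check_inventory(reported):
    print(problem)  # DISK_GB: max_unit=0 outside the allowed range [1, 2147483647]
```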
[ 679.206558] env[69367]: DEBUG oslo_concurrency.lockutils [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.961s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.207178] env[69367]: INFO nova.compute.claims [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 679.212169] env[69367]: DEBUG nova.compute.manager [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] Build of instance f66c0467-a408-4e56-abdf-2c19cc3d9c11 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 679.212169] env[69367]: DEBUG nova.compute.manager [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 679.212169] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Acquiring lock "refresh_cache-f66c0467-a408-4e56-abdf-2c19cc3d9c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.212451] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Acquired lock "refresh_cache-f66c0467-a408-4e56-abdf-2c19cc3d9c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 679.212451] env[69367]: DEBUG nova.network.neutron [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 679.218410] env[69367]: DEBUG nova.network.neutron [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.228199] env[69367]: ERROR oslo_messaging.rpc.server [None req-7b94712f-24c6-4e7c-bb81-62b66d431601 tempest-DeleteServersAdminTestJSON-676728523 tempest-DeleteServersAdminTestJSON-676728523-project-admin] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider 
information supplied by the compute host. [ 679.228199] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 679.228199] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 679.228199] env[69367]: ERROR oslo_messaging.rpc.server yield [ 679.228199] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 679.228199] env[69367]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 679.228199] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 679.228199] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 679.228199] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2e4857cc-da57-4430-afc3-2d8fb2610406"}]} [ 679.228199] env[69367]: ERROR oslo_messaging.rpc.server [ 679.229019] env[69367]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 679.229019] env[69367]: ERROR oslo_messaging.rpc.server [ 679.229019] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 679.229019] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 679.229019] env[69367]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 679.229019] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 679.229019] env[69367]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 679.229019] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 679.229019] env[69367]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 679.229019] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 679.229019] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 679.229019] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 679.229019] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 679.229019] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 679.229019] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 679.229019] env[69367]: ERROR 
oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 679.229019] env[69367]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 679.229019] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 679.229622] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 679.229622] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 679.229622] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 679.229622] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 679.229622] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 679.229622] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 679.229622] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 679.229622] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 679.229622] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 679.229622] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 679.229622] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 679.229622] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 679.229622] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 679.229622] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 679.229622] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 679.229622] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 679.229622] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 679.229622] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 679.230594] env[69367]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 679.230594] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 679.230594] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 679.230594] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 679.230594] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 679.230594] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 679.230594] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 679.230594] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 679.230594] env[69367]: ERROR 
oslo_messaging.rpc.server raise self.value [ 679.230594] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 679.230594] env[69367]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 679.230594] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 679.230594] env[69367]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 679.230594] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 679.230594] env[69367]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 679.230594] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 679.230594] env[69367]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 679.230594] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 679.231429] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 679.231429] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 679.231429] env[69367]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 679.231429] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 679.231429] env[69367]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 679.231429] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 679.231429] env[69367]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 679.231429] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 679.231429] env[69367]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 679.231429] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 679.231429] env[69367]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 679.231429] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 679.231429] env[69367]: ERROR oslo_messaging.rpc.server raise value [ 679.231429] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 679.231429] env[69367]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 679.231429] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 679.231429] env[69367]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 679.232204] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 679.232204] env[69367]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 679.232204] env[69367]: 
ERROR oslo_messaging.rpc.server File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 679.232204] env[69367]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 679.232204] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 679.232204] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 679.232204] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 679.232204] env[69367]: ERROR oslo_messaging.rpc.server [ 679.263851] env[69367]: DEBUG oslo_concurrency.lockutils [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Acquiring lock "ba4d981a-19f7-41ef-b7d1-a3f3830fe725" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 679.264135] env[69367]: DEBUG oslo_concurrency.lockutils [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Lock "ba4d981a-19f7-41ef-b7d1-a3f3830fe725" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.264373] env[69367]: DEBUG oslo_concurrency.lockutils [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Acquiring lock "ba4d981a-19f7-41ef-b7d1-a3f3830fe725-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 679.264563] env[69367]: DEBUG oslo_concurrency.lockutils [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Lock "ba4d981a-19f7-41ef-b7d1-a3f3830fe725-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.264738] env[69367]: DEBUG oslo_concurrency.lockutils [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Lock "ba4d981a-19f7-41ef-b7d1-a3f3830fe725-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.270343] env[69367]: INFO nova.compute.manager [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Terminating instance [ 679.367678] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "ab365570-ac29-4094-be4c-d49563a465c8" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.063s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.638847] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233834, 'name': Rename_Task, 'duration_secs': 1.258396} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.639161] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 679.639533] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1f25d291-fcc5-4e01-930a-54ea8270a2f5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.648870] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 679.648870] env[69367]: value = "task-4233835" [ 679.648870] env[69367]: _type = "Task" [ 679.648870] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.707767] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233835, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.707767] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Releasing lock "refresh_cache-92c27615-d377-492f-a9db-ff45b2e71537" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 679.707865] env[69367]: DEBUG nova.compute.manager [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Instance network_info: |[{"id": "495ec91b-986e-4b74-820f-28ae7f03a86a", "address": "fa:16:3e:6f:58:18", "network": {"id": "c3914068-abfe-4a12-9775-bc4851a19549", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-393561532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e332ecec6c1c43c18345d8a2761d98be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9079d3b9-5c2d-4ca1-8d2f-68ceb8ec8c98", "external-id": "nsx-vlan-transportzone-527", "segmentation_id": 527, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap495ec91b-98", "ovs_interfaceid": "495ec91b-986e-4b74-820f-28ae7f03a86a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 679.708156] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:58:18', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9079d3b9-5c2d-4ca1-8d2f-68ceb8ec8c98', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '495ec91b-986e-4b74-820f-28ae7f03a86a', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 679.708156] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Creating folder: Project (e332ecec6c1c43c18345d8a2761d98be). Parent ref: group-v837645. 
{{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 679.708156] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d9db9d8c-0348-4f1f-ab0f-19a9de6ba340 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.708156] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Created folder: Project (e332ecec6c1c43c18345d8a2761d98be) in parent group-v837645. [ 679.708156] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Creating folder: Instances. Parent ref: group-v837690. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 679.715053] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7a9714f1-6e5d-406d-a9c8-210acc4ff9dd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.715053] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Created folder: Instances in parent group-v837690. [ 679.715053] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 679.715232] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 679.717635] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-610fe7d8-cf6d-40f0-8cdd-475b585b95cb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.756220] env[69367]: INFO nova.compute.manager [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] [instance: 8001cca4-9b9f-4425-b6e4-d27866395886] Took 1.22 seconds to deallocate network for instance. [ 679.776731] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 679.776731] env[69367]: value = "task-4233838" [ 679.776731] env[69367]: _type = "Task" [ 679.776731] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.776731] env[69367]: DEBUG nova.compute.manager [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Start destroying the instance on the hypervisor. 
{{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 679.776731] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 679.779337] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a91e253b-c852-4c8f-acc5-d30a106c68ce {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.807155] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233838, 'name': CreateVM_Task} progress is 6%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.810556] env[69367]: DEBUG nova.network.neutron [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 679.814911] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 679.816556] env[69367]: DEBUG nova.scheduler.client.report [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 679.818876] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1ad3a289-5c94-4ff8-8f07-7ee2ce65a4d3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.839816] env[69367]: DEBUG oslo_vmware.api [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Waiting for the task: (returnval){ [ 679.839816] env[69367]: value = "task-4233839" [ 679.839816] env[69367]: _type = "Task" [ 679.839816] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.850825] env[69367]: DEBUG oslo_vmware.api [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Task: {'id': task-4233839, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.855024] env[69367]: DEBUG nova.scheduler.client.report [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 679.855024] env[69367]: DEBUG nova.compute.provider_tree [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 679.875692] env[69367]: DEBUG nova.compute.manager [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 679.892693] env[69367]: DEBUG nova.scheduler.client.report [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 679.943196] env[69367]: DEBUG nova.scheduler.client.report [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 680.139026] env[69367]: DEBUG nova.network.neutron [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.170032] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233835, 'name': PowerOnVM_Task} progress is 79%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.186083] env[69367]: DEBUG oslo_concurrency.lockutils [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] Acquiring lock "26418f26-07ae-45e4-87d6-bdcf99674fb5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.186083] env[69367]: DEBUG oslo_concurrency.lockutils [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] Lock "26418f26-07ae-45e4-87d6-bdcf99674fb5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 680.200960] env[69367]: DEBUG nova.network.neutron [-] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.310856] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233838, 'name': CreateVM_Task, 'duration_secs': 0.403839} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.311659] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 680.318977] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.318977] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 680.319528] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 680.320854] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24b6243d-a2de-47c3-b884-089e87ba4484 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.328045] env[69367]: DEBUG oslo_vmware.api [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Waiting for the task: (returnval){ [ 680.328045] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52a1a7ba-b0f0-2ebe-932a-9513ff926ee9" [ 680.328045] env[69367]: _type = "Task" [ 680.328045] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.348287] env[69367]: DEBUG oslo_vmware.api [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52a1a7ba-b0f0-2ebe-932a-9513ff926ee9, 'name': SearchDatastore_Task, 'duration_secs': 0.012955} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.349345] env[69367]: DEBUG oslo_vmware.api [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Task: {'id': task-4233839, 'name': PowerOffVM_Task, 'duration_secs': 0.269887} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.352222] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 680.353699] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 680.353699] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.353699] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 680.353699] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 680.353699] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 680.353894] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 680.354042] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae93e2e0-392f-4af7-badd-fc54d8763d7a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.356057] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-37c83245-e732-4d31-a95e-e63a96536099 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.370135] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Created 
directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 680.372972] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 680.372972] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d527c84f-5026-468c-be87-1d05efe325b8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.393546] env[69367]: DEBUG oslo_vmware.api [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Waiting for the task: (returnval){ [ 680.393546] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52f49775-afac-add8-082e-d4b0316bc2a3" [ 680.393546] env[69367]: _type = "Task" [ 680.393546] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.406661] env[69367]: DEBUG oslo_vmware.api [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52f49775-afac-add8-082e-d4b0316bc2a3, 'name': SearchDatastore_Task, 'duration_secs': 0.01327} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.410184] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0dcda7a2-0db3-4e2f-9657-5aa15afc1709 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.417469] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.424111] env[69367]: DEBUG oslo_vmware.api [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Waiting for the task: (returnval){ [ 680.424111] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]5203389c-8337-5ef4-684f-39a1ac3d5b02" [ 680.424111] env[69367]: _type = "Task" [ 680.424111] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.433540] env[69367]: DEBUG oslo_vmware.api [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5203389c-8337-5ef4-684f-39a1ac3d5b02, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.592660] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 680.593021] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 680.595134] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Deleting the datastore file [datastore2] ba4d981a-19f7-41ef-b7d1-a3f3830fe725 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 680.595134] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e81a0c7c-a119-4b55-97f4-ab3cbdab7e9b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.597825] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5097c34d-db23-4860-98ba-566ca071a5bc {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.607466] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-181e48cf-b11e-4169-9597-d164a1331fe5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.612550] env[69367]: DEBUG oslo_vmware.api [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Waiting for the task: (returnval){ [ 680.612550] env[69367]: value = "task-4233844" [ 680.612550] env[69367]: _type = "Task" [ 680.612550] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.645107] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Releasing lock "refresh_cache-f66c0467-a408-4e56-abdf-2c19cc3d9c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 680.645442] env[69367]: DEBUG nova.compute.manager [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 680.645536] env[69367]: DEBUG nova.compute.manager [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 680.645702] env[69367]: DEBUG nova.network.neutron [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 680.649718] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ae8682-f37c-420c-aef4-c494132b766c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.656240] env[69367]: DEBUG oslo_vmware.api [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Task: {'id': task-4233844, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.669358] env[69367]: DEBUG oslo_vmware.api [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233835, 'name': PowerOnVM_Task, 'duration_secs': 0.704762} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.670293] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 680.670293] env[69367]: INFO nova.compute.manager [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Took 14.22 seconds to spawn the instance on the hypervisor. 
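[editor's note] The tracebacks above pass through retrying.py before the placement error surfaces because nova wraps the resource tracker's `_update_to_placement` call with the `retrying` library. The snippet below is a generic, hypothetical illustration of that retry-then-reraise pattern, not nova's actual configuration; the class and function names are made up for the sketch.

```python
# Generic illustration (hypothetical names) of the retry pattern visible in the
# tracebacks above: a function wrapped by the `retrying` library is re-invoked
# on selected exceptions and the last exception propagates once the attempt
# limit is reached, which is why retrying.py frames appear in the stack.
from retrying import retry


class PlacementConflict(Exception):
    """Stand-in for a retryable placement error."""


def _is_retryable(exc):
    return isinstance(exc, PlacementConflict)


@retry(stop_max_attempt_number=3, retry_on_exception=_is_retryable)
def update_to_placement():
    # Each attempt raises; after the third attempt the exception is re-raised
    # to the caller instead of being swallowed.
    raise PlacementConflict("simulated placement failure")


# Calling update_to_placement() raises PlacementConflict after 3 attempts.
```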
[ 680.670293] env[69367]: DEBUG nova.compute.manager [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 680.672227] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2847551-b8ec-4724-942f-9f3ab829f7a3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.678109] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7372823-1b7a-4eeb-995d-e4665dc4cc31 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.696726] env[69367]: DEBUG nova.compute.provider_tree [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 680.699016] env[69367]: DEBUG nova.network.neutron [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 680.704877] env[69367]: INFO nova.compute.manager [-] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Took 2.92 seconds to deallocate network for instance. 
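[editor's note] The ProviderTree inventory logged just above carries `DISK_GB` with `max_unit: 0`, and the 400 response further down quotes the schema fragment `{'type': 'integer', 'maximum': 2147483647, 'minimum': 1}` that rejects it. The sketch below reproduces that validation outcome with the `jsonschema` package; the schema is a simplified reconstruction from the quoted error detail, not placement's full inventory schema.

```python
import jsonschema

# Simplified reconstruction of the schema fragment quoted in the 400 response:
# every inventory record's max_unit must be an integer >= 1.
INVENTORIES_SCHEMA = {
    'type': 'object',
    'properties': {
        'inventories': {
            'type': 'object',
            'patternProperties': {
                '^[A-Z0-9_]+$': {
                    'type': 'object',
                    'properties': {
                        'max_unit': {
                            'type': 'integer',
                            'maximum': 2147483647,
                            'minimum': 1,
                        },
                    },
                },
            },
        },
    },
}

# Payload mirroring the DISK_GB inventory the compute host tried to push.
payload = {
    'inventories': {
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1,
                    'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0},
    },
}

try:
    jsonschema.validate(payload, INVENTORIES_SCHEMA)
except jsonschema.ValidationError as exc:
    # Prints "0 is less than the minimum of 1" -- the same detail placement
    # returns as HTTP 400, which the report client converts into
    # ResourceProviderUpdateFailed.
    print(exc.message)
```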
[ 680.784150] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] Acquiring lock "73d75c52-7ac9-4a28-8bfd-855fba7950b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.784150] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] Lock "73d75c52-7ac9-4a28-8bfd-855fba7950b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 680.818556] env[69367]: INFO nova.scheduler.client.report [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] Deleted allocations for instance 8001cca4-9b9f-4425-b6e4-d27866395886 [ 680.935787] env[69367]: DEBUG oslo_vmware.api [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5203389c-8337-5ef4-684f-39a1ac3d5b02, 'name': SearchDatastore_Task, 'duration_secs': 0.0112} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.935975] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 680.936268] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] 92c27615-d377-492f-a9db-ff45b2e71537/92c27615-d377-492f-a9db-ff45b2e71537.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 680.936688] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5e0499c4-94d8-451f-ad13-43ebe519956c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.945914] env[69367]: DEBUG oslo_vmware.api [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Waiting for the task: (returnval){ [ 680.945914] env[69367]: value = "task-4233845" [ 680.945914] env[69367]: _type = "Task" [ 680.945914] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.957137] env[69367]: DEBUG oslo_vmware.api [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Task: {'id': task-4233845, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.119438] env[69367]: DEBUG nova.network.neutron [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Successfully updated port: 5401116f-daf2-4db0-b052-7bd1adb63cc1 {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 681.130629] env[69367]: DEBUG oslo_vmware.api [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Task: {'id': task-4233844, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.190305} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.131190] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 681.131500] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 681.131805] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 681.132144] env[69367]: INFO nova.compute.manager [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Took 1.36 seconds to destroy the instance on the hypervisor. [ 681.132529] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 681.132865] env[69367]: DEBUG nova.compute.manager [-] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 681.133106] env[69367]: DEBUG nova.network.neutron [-] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 681.218193] env[69367]: DEBUG nova.network.neutron [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.225484] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.226574] env[69367]: INFO nova.compute.manager [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Took 40.64 seconds to build instance. [ 681.253039] env[69367]: ERROR nova.scheduler.client.report [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] [req-fb963283-b665-4639-9ee0-7ad4e192a978] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-fb963283-b665-4639-9ee0-7ad4e192a978"}]} [ 681.255675] env[69367]: DEBUG oslo_concurrency.lockutils [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.048s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 681.255675] env[69367]: ERROR nova.compute.manager [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 681.255675] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] Traceback (most recent call last): [ 681.255675] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 681.255675] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] yield [ 681.255675] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 681.255675] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] self.set_inventory_for_provider( [ 681.255675] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 681.255675] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 681.256660] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-fb963283-b665-4639-9ee0-7ad4e192a978"}]} [ 681.256660] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] [ 681.256660] env[69367]: ERROR 
nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] During handling of the above exception, another exception occurred: [ 681.256660] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] [ 681.256660] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] Traceback (most recent call last): [ 681.256660] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 681.256660] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] with self.rt.instance_claim(context, instance, node, allocs, [ 681.256660] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 681.256660] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] return f(*args, **kwargs) [ 681.256996] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 681.256996] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] self._update(elevated, cn) [ 681.256996] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 681.256996] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] self._update_to_placement(context, compute_node, startup) [ 681.256996] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 681.256996] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 681.256996] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 681.256996] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] return attempt.get(self._wrap_exception) [ 681.256996] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 681.256996] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] six.reraise(self.value[0], self.value[1], self.value[2]) [ 681.256996] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 681.256996] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] raise value [ 681.256996] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 681.257463] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 681.257463] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] File 
"/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 681.257463] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] self.reportclient.update_from_provider_tree( [ 681.257463] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 681.257463] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] with catch_all(pd.uuid): [ 681.257463] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 681.257463] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] self.gen.throw(typ, value, traceback) [ 681.257463] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 681.257463] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] raise exception.ResourceProviderSyncFailed() [ 681.257463] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 681.257463] env[69367]: ERROR nova.compute.manager [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] [ 681.257825] env[69367]: DEBUG nova.compute.utils [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 681.257825] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.629s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.257825] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 681.257825] env[69367]: INFO nova.compute.manager [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] [instance: a358ce6d-9826-4ddb-8c2f-51bac8db59d4] Successfully reverted task state from None on failure for instance. 
[ 681.259300] env[69367]: DEBUG oslo_concurrency.lockutils [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 24.601s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.259486] env[69367]: DEBUG nova.objects.instance [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69367) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 681.265028] env[69367]: ERROR oslo_messaging.rpc.server [None req-0bdb360b-0170-4e12-942c-b15ee8c54cb9 tempest-ServersAaction247Test-2053678796 tempest-ServersAaction247Test-2053678796-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 681.265028] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 681.265028] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 681.265028] env[69367]: ERROR oslo_messaging.rpc.server yield [ 681.265028] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 681.265028] env[69367]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 681.265028] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 681.265028] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 681.265028] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-34cb8e2e-98a5-45f1-901f-90576656f5c2"}]} [ 681.265028] env[69367]: ERROR oslo_messaging.rpc.server [ 681.265635] env[69367]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 681.265635] env[69367]: ERROR oslo_messaging.rpc.server [ 681.265635] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 681.265635] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 681.265635] env[69367]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 681.265635] env[69367]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 681.265635] env[69367]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 681.265635] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 681.265635] env[69367]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 681.265635] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 681.265635] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 681.265635] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 681.265635] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 681.265635] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 681.265635] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 681.265635] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 681.265635] env[69367]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 681.265635] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 681.266226] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 681.266226] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 681.266226] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 681.266226] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 681.266226] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 681.266226] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 681.266226] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 681.266226] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 681.266226] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 681.266226] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 681.266226] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 681.266226] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 681.266226] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 681.266226] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 681.266226] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 681.266226] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 681.266226] env[69367]: ERROR oslo_messaging.rpc.server return 
function(self, context, *args, **kwargs) [ 681.266226] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 681.270198] env[69367]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 681.270198] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 681.270198] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 681.270198] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 681.270198] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 681.270198] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 681.270198] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 681.270198] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 681.270198] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 681.270198] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 681.270198] env[69367]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 681.270198] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 681.270198] env[69367]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 681.270198] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 681.270198] env[69367]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 681.270198] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 681.270198] env[69367]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 681.270198] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 681.270821] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 681.270821] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 681.270821] env[69367]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 681.270821] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 681.270821] env[69367]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 681.270821] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 681.270821] env[69367]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 681.270821] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 681.270821] env[69367]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 681.270821] env[69367]: ERROR 
oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 681.270821] env[69367]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 681.270821] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 681.270821] env[69367]: ERROR oslo_messaging.rpc.server raise value [ 681.270821] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 681.270821] env[69367]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 681.270821] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 681.270821] env[69367]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 681.271305] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 681.271305] env[69367]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 681.271305] env[69367]: ERROR oslo_messaging.rpc.server File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 681.271305] env[69367]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 681.271305] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 681.271305] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 681.271305] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 681.271305] env[69367]: ERROR oslo_messaging.rpc.server [ 681.271305] env[69367]: DEBUG nova.compute.manager [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] Build of instance c7bc6ebd-d7fd-439a-829f-8f4bf2065623 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 681.271305] env[69367]: DEBUG nova.compute.manager [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 681.271591] env[69367]: DEBUG oslo_concurrency.lockutils [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] Acquiring lock "refresh_cache-c7bc6ebd-d7fd-439a-829f-8f4bf2065623" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.271591] env[69367]: DEBUG oslo_concurrency.lockutils [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] Acquired lock "refresh_cache-c7bc6ebd-d7fd-439a-829f-8f4bf2065623" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.271591] env[69367]: DEBUG nova.network.neutron [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 681.331338] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0cf850bb-f4f3-4902-82da-ca6e283aae47 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184 tempest-FloatingIPsAssociationNegativeTestJSON-1638525184-project-member] Lock "8001cca4-9b9f-4425-b6e4-d27866395886" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.210s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 681.461781] env[69367]: DEBUG oslo_vmware.api [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Task: {'id': task-4233845, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.632714] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquiring lock "refresh_cache-d2f8328d-fd05-4e63-9cbd-a6e3ec948964" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.632714] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquired lock "refresh_cache-d2f8328d-fd05-4e63-9cbd-a6e3ec948964" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.632714] env[69367]: DEBUG nova.network.neutron [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 681.727250] env[69367]: INFO nova.compute.manager [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: f66c0467-a408-4e56-abdf-2c19cc3d9c11] Took 1.08 seconds to deallocate network for instance. [ 681.735273] env[69367]: DEBUG oslo_concurrency.lockutils [None req-590c774b-b76a-47d6-b0f7-c97f1b6ecfd3 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "c17525ee-d038-4c81-932b-ed74a6de6cb5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.395s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 681.837627] env[69367]: DEBUG nova.compute.manager [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 681.958346] env[69367]: DEBUG oslo_vmware.api [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Task: {'id': task-4233845, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.544478} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.958615] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] 92c27615-d377-492f-a9db-ff45b2e71537/92c27615-d377-492f-a9db-ff45b2e71537.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 681.958834] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 681.959102] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b0eca29d-c78b-48e2-8a0a-82296638c16d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.967640] env[69367]: DEBUG oslo_vmware.api [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Waiting for the task: (returnval){ [ 681.967640] env[69367]: value = "task-4233846" [ 681.967640] env[69367]: _type = "Task" [ 681.967640] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.984343] env[69367]: DEBUG oslo_vmware.api [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Task: {'id': task-4233846, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.985334] env[69367]: DEBUG nova.network.neutron [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 682.087956] env[69367]: DEBUG nova.network.neutron [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.197928] env[69367]: DEBUG nova.network.neutron [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 682.240944] env[69367]: DEBUG nova.compute.manager [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 682.279189] env[69367]: DEBUG oslo_concurrency.lockutils [None req-33acfe44-1c86-4d68-9fe6-a9daa0bdae37 tempest-ServersAdmin275Test-153682517 tempest-ServersAdmin275Test-153682517-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 682.281463] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.940s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.282228] env[69367]: DEBUG nova.objects.instance [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Lazy-loading 'resources' on Instance uuid 4e346ed1-36e9-421d-975f-e8bb6f05c0a0 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 682.351722] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquiring lock "25be9c82-df06-498d-b5e7-c59e0ceed475" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.351722] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "25be9c82-df06-498d-b5e7-c59e0ceed475" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.356037] env[69367]: DEBUG nova.compute.manager [req-7bfe9eed-3e4f-4594-a1bf-6b4e5a353914 req-9755b350-6a70-469b-9503-251dcd43fc27 service nova] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Received event network-changed-495ec91b-986e-4b74-820f-28ae7f03a86a {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 682.356685] env[69367]: DEBUG nova.compute.manager [req-7bfe9eed-3e4f-4594-a1bf-6b4e5a353914 req-9755b350-6a70-469b-9503-251dcd43fc27 service nova] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Refreshing instance network info cache due to event network-changed-495ec91b-986e-4b74-820f-28ae7f03a86a. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 682.356685] env[69367]: DEBUG oslo_concurrency.lockutils [req-7bfe9eed-3e4f-4594-a1bf-6b4e5a353914 req-9755b350-6a70-469b-9503-251dcd43fc27 service nova] Acquiring lock "refresh_cache-92c27615-d377-492f-a9db-ff45b2e71537" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.356685] env[69367]: DEBUG oslo_concurrency.lockutils [req-7bfe9eed-3e4f-4594-a1bf-6b4e5a353914 req-9755b350-6a70-469b-9503-251dcd43fc27 service nova] Acquired lock "refresh_cache-92c27615-d377-492f-a9db-ff45b2e71537" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 682.356818] env[69367]: DEBUG nova.network.neutron [req-7bfe9eed-3e4f-4594-a1bf-6b4e5a353914 req-9755b350-6a70-469b-9503-251dcd43fc27 service nova] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Refreshing network info cache for port 495ec91b-986e-4b74-820f-28ae7f03a86a {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 682.377552] env[69367]: DEBUG oslo_concurrency.lockutils [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.477549] env[69367]: DEBUG nova.network.neutron [-] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.490910] env[69367]: DEBUG oslo_vmware.api [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Task: {'id': task-4233846, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.118098} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.491268] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 682.495286] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ba4fa6-b50a-487f-b77a-d376171a10dc {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.501596] env[69367]: DEBUG nova.network.neutron [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Updating instance_info_cache with network_info: [{"id": "5401116f-daf2-4db0-b052-7bd1adb63cc1", "address": "fa:16:3e:d1:24:7a", "network": {"id": "341b1aa4-f821-4feb-8230-bd176112b3c3", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2139182269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73aa45fc35b44c6a8a2dd8ef127b974", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5401116f-da", "ovs_interfaceid": "5401116f-daf2-4db0-b052-7bd1adb63cc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.526577] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] 92c27615-d377-492f-a9db-ff45b2e71537/92c27615-d377-492f-a9db-ff45b2e71537.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 682.529610] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Releasing lock "refresh_cache-d2f8328d-fd05-4e63-9cbd-a6e3ec948964" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.529651] env[69367]: DEBUG nova.compute.manager [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Instance network_info: |[{"id": "5401116f-daf2-4db0-b052-7bd1adb63cc1", "address": 
"fa:16:3e:d1:24:7a", "network": {"id": "341b1aa4-f821-4feb-8230-bd176112b3c3", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2139182269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73aa45fc35b44c6a8a2dd8ef127b974", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5401116f-da", "ovs_interfaceid": "5401116f-daf2-4db0-b052-7bd1adb63cc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 682.530014] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48bf9b67-97db-4925-97b8-71b4b77ea0f3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.551386] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d1:24:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5401116f-daf2-4db0-b052-7bd1adb63cc1', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 682.560609] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Creating folder: Project (d73aa45fc35b44c6a8a2dd8ef127b974). Parent ref: group-v837645. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 682.560687] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a07264c1-50d9-42d0-85ca-465447ba6108 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.570938] env[69367]: DEBUG oslo_vmware.api [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Waiting for the task: (returnval){ [ 682.570938] env[69367]: value = "task-4233848" [ 682.570938] env[69367]: _type = "Task" [ 682.570938] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.577822] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Created folder: Project (d73aa45fc35b44c6a8a2dd8ef127b974) in parent group-v837645. 
[ 682.577822] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Creating folder: Instances. Parent ref: group-v837696. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 682.585970] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-01c5aa0d-ddfd-448d-8c76-9fe40e95b602 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.588087] env[69367]: DEBUG oslo_vmware.api [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Task: {'id': task-4233848, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.593336] env[69367]: DEBUG oslo_concurrency.lockutils [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] Releasing lock "refresh_cache-c7bc6ebd-d7fd-439a-829f-8f4bf2065623" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.593336] env[69367]: DEBUG nova.compute.manager [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 682.593336] env[69367]: DEBUG nova.compute.manager [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 682.593336] env[69367]: DEBUG nova.network.neutron [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 682.601484] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Created folder: Instances in parent group-v837696. [ 682.603416] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 682.603416] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 682.603416] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-03d89ea6-9a24-4d97-9bd6-9e10eca7265d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.627858] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 682.627858] env[69367]: value = "task-4233850" [ 682.627858] env[69367]: _type = "Task" [ 682.627858] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.640994] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233850, 'name': CreateVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.673392] env[69367]: DEBUG nova.network.neutron [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 682.781554] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.784258] env[69367]: INFO nova.scheduler.client.report [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Deleted allocations for instance f66c0467-a408-4e56-abdf-2c19cc3d9c11 [ 682.825790] env[69367]: DEBUG nova.scheduler.client.report [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 682.856548] env[69367]: DEBUG nova.scheduler.client.report [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 682.856831] env[69367]: DEBUG nova.compute.provider_tree [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Updating 
inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 682.877816] env[69367]: DEBUG nova.scheduler.client.report [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 682.888050] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] Acquiring lock "13c1b1aa-2190-4d28-81ad-697e4c098897" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.888050] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] Lock "13c1b1aa-2190-4d28-81ad-697e4c098897" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.920515] env[69367]: DEBUG nova.scheduler.client.report [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 682.986132] env[69367]: INFO nova.compute.manager [-] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Took 1.85 seconds to deallocate network for instance. [ 683.091210] env[69367]: DEBUG oslo_vmware.api [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Task: {'id': task-4233848, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.143867] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233850, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.177288] env[69367]: DEBUG nova.network.neutron [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.307656] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a34c9c6d-e38d-4163-9cc3-a39df0a2ad31 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Lock "f66c0467-a408-4e56-abdf-2c19cc3d9c11" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.329s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 683.460601] env[69367]: DEBUG nova.network.neutron [req-7bfe9eed-3e4f-4594-a1bf-6b4e5a353914 req-9755b350-6a70-469b-9503-251dcd43fc27 service nova] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Updated VIF entry in instance network info cache for port 495ec91b-986e-4b74-820f-28ae7f03a86a. {{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 683.460947] env[69367]: DEBUG nova.network.neutron [req-7bfe9eed-3e4f-4594-a1bf-6b4e5a353914 req-9755b350-6a70-469b-9503-251dcd43fc27 service nova] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Updating instance_info_cache with network_info: [{"id": "495ec91b-986e-4b74-820f-28ae7f03a86a", "address": "fa:16:3e:6f:58:18", "network": {"id": "c3914068-abfe-4a12-9775-bc4851a19549", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-393561532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e332ecec6c1c43c18345d8a2761d98be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9079d3b9-5c2d-4ca1-8d2f-68ceb8ec8c98", "external-id": "nsx-vlan-transportzone-527", "segmentation_id": 527, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap495ec91b-98", "ovs_interfaceid": "495ec91b-986e-4b74-820f-28ae7f03a86a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.487360] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b402644a-4b89-4ec2-b833-41195f9f1680 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.496988] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0921b7c8-544b-4274-8a20-35c4c75e8851 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.502949] env[69367]: DEBUG oslo_concurrency.lockutils [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc 
tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 683.534871] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c69552d4-8d5a-452f-9877-0dffb0bc5908 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.543510] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebf699cc-50b6-43dc-9c67-29d2f3dad73e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.558814] env[69367]: DEBUG nova.compute.provider_tree [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 683.586333] env[69367]: DEBUG oslo_vmware.api [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Task: {'id': task-4233848, 'name': ReconfigVM_Task, 'duration_secs': 0.588193} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.586333] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Reconfigured VM instance instance-00000017 to attach disk [datastore1] 92c27615-d377-492f-a9db-ff45b2e71537/92c27615-d377-492f-a9db-ff45b2e71537.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 683.588094] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d897bdb-1916-4794-a571-1d3f1234a347 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.595158] env[69367]: DEBUG oslo_vmware.api [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Waiting for the task: (returnval){ [ 683.595158] env[69367]: value = "task-4233852" [ 683.595158] env[69367]: _type = "Task" [ 683.595158] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.605522] env[69367]: DEBUG oslo_vmware.api [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Task: {'id': task-4233852, 'name': Rename_Task} progress is 5%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.646225] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233850, 'name': CreateVM_Task, 'duration_secs': 0.533591} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.646225] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 683.646225] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.646225] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 683.646225] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 683.646225] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7a48470-e6bf-4d36-b07c-1117d63c33f4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.651614] env[69367]: DEBUG oslo_vmware.api [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Waiting for the task: (returnval){ [ 683.651614] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52cec871-c374-bc1b-ffb2-cd80ae35b803" [ 683.651614] env[69367]: _type = "Task" [ 683.651614] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.673347] env[69367]: DEBUG oslo_vmware.api [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52cec871-c374-bc1b-ffb2-cd80ae35b803, 'name': SearchDatastore_Task, 'duration_secs': 0.011994} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.675796] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 683.676130] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 683.676424] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.676597] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 683.676788] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 683.677984] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-182bc044-3cbe-40bd-a087-7ddfd61b441e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.687405] env[69367]: INFO nova.compute.manager [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] [instance: c7bc6ebd-d7fd-439a-829f-8f4bf2065623] Took 1.09 seconds to deallocate network for instance. [ 683.695016] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 683.695325] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 683.696433] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42b059e4-6aa9-43ac-bd44-59015d805bf2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.708783] env[69367]: DEBUG oslo_vmware.api [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Waiting for the task: (returnval){ [ 683.708783] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52856c9f-b77b-437b-fea0-69aac3bc79db" [ 683.708783] env[69367]: _type = "Task" [ 683.708783] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.716549] env[69367]: DEBUG oslo_vmware.api [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52856c9f-b77b-437b-fea0-69aac3bc79db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.811918] env[69367]: DEBUG nova.compute.manager [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 683.966140] env[69367]: DEBUG oslo_concurrency.lockutils [req-7bfe9eed-3e4f-4594-a1bf-6b4e5a353914 req-9755b350-6a70-469b-9503-251dcd43fc27 service nova] Releasing lock "refresh_cache-92c27615-d377-492f-a9db-ff45b2e71537" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 683.966817] env[69367]: DEBUG nova.compute.manager [req-7bfe9eed-3e4f-4594-a1bf-6b4e5a353914 req-9755b350-6a70-469b-9503-251dcd43fc27 service nova] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Received event network-vif-deleted-b6a0688d-a5a2-4937-9ac7-25b53f9b001d {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 684.090248] env[69367]: ERROR nova.scheduler.client.report [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [req-7d30141e-169b-4157-a035-edc102a94e96] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-7d30141e-169b-4157-a035-edc102a94e96"}]} [ 684.090650] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.809s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 684.091251] env[69367]: ERROR nova.compute.manager [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 684.091251] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Traceback (most recent call last): [ 684.091251] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 684.091251] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] yield [ 684.091251] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 684.091251] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] self.set_inventory_for_provider( [ 684.091251] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 684.091251] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 684.091486] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-7d30141e-169b-4157-a035-edc102a94e96"}]} [ 684.091486] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] [ 684.091486] env[69367]: ERROR nova.compute.manager [instance: 
4e346ed1-36e9-421d-975f-e8bb6f05c0a0] During handling of the above exception, another exception occurred: [ 684.091486] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] [ 684.091486] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Traceback (most recent call last): [ 684.091486] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 684.091486] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] self._delete_instance(context, instance, bdms) [ 684.091486] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 684.091486] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] self._complete_deletion(context, instance) [ 684.091744] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 684.091744] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] self._update_resource_tracker(context, instance) [ 684.091744] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 684.091744] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] self.rt.update_usage(context, instance, instance.node) [ 684.091744] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 684.091744] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] return f(*args, **kwargs) [ 684.091744] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 684.091744] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] self._update(context.elevated(), self.compute_nodes[nodename]) [ 684.091744] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 684.091744] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] self._update_to_placement(context, compute_node, startup) [ 684.091744] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 684.091744] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 684.092117] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 684.092117] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] return attempt.get(self._wrap_exception) [ 684.092117] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 684.092117] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] six.reraise(self.value[0], self.value[1], self.value[2]) [ 684.092117] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 684.092117] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] raise value [ 684.092117] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 684.092117] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 684.092117] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 684.092117] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] self.reportclient.update_from_provider_tree( [ 684.092117] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 684.092117] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] with catch_all(pd.uuid): [ 684.092117] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 684.092462] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] self.gen.throw(typ, value, traceback) [ 684.092462] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 684.092462] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] raise exception.ResourceProviderSyncFailed() [ 684.092462] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 684.092462] env[69367]: ERROR nova.compute.manager [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] [ 684.093732] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.635s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.095189] env[69367]: INFO nova.compute.claims [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 684.115750] env[69367]: DEBUG oslo_vmware.api [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Task: {'id': task-4233852, 'name': Rename_Task, 'duration_secs': 0.350056} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.118289] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 684.118822] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8c3a995c-f32b-45fb-91af-93bb63ff72ff {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.128416] env[69367]: DEBUG oslo_vmware.api [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Waiting for the task: (returnval){ [ 684.128416] env[69367]: value = "task-4233853" [ 684.128416] env[69367]: _type = "Task" [ 684.128416] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.148532] env[69367]: DEBUG oslo_vmware.api [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Task: {'id': task-4233853, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.221939] env[69367]: DEBUG oslo_vmware.api [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52856c9f-b77b-437b-fea0-69aac3bc79db, 'name': SearchDatastore_Task, 'duration_secs': 0.011095} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.223381] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3d0134e-ea20-4274-b983-8e0f14329625 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.233352] env[69367]: DEBUG oslo_vmware.api [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Waiting for the task: (returnval){ [ 684.233352] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52c2f96d-cbfe-1ee9-4b45-edc0e25dda08" [ 684.233352] env[69367]: _type = "Task" [ 684.233352] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.244968] env[69367]: DEBUG oslo_vmware.api [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52c2f96d-cbfe-1ee9-4b45-edc0e25dda08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.346690] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 684.426304] env[69367]: DEBUG oslo_concurrency.lockutils [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "c17525ee-d038-4c81-932b-ed74a6de6cb5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 684.426572] env[69367]: DEBUG oslo_concurrency.lockutils [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "c17525ee-d038-4c81-932b-ed74a6de6cb5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.426783] env[69367]: DEBUG oslo_concurrency.lockutils [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "c17525ee-d038-4c81-932b-ed74a6de6cb5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 684.426971] env[69367]: DEBUG oslo_concurrency.lockutils [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "c17525ee-d038-4c81-932b-ed74a6de6cb5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.427218] env[69367]: DEBUG oslo_concurrency.lockutils [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "c17525ee-d038-4c81-932b-ed74a6de6cb5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 684.430853] env[69367]: INFO nova.compute.manager [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Terminating instance [ 684.606271] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Lock "4e346ed1-36e9-421d-975f-e8bb6f05c0a0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.945s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 684.646043] env[69367]: DEBUG oslo_vmware.api [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Task: {'id': task-4233853, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.738637] env[69367]: INFO nova.scheduler.client.report [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] Deleted allocations for instance c7bc6ebd-d7fd-439a-829f-8f4bf2065623 [ 684.754525] env[69367]: DEBUG oslo_vmware.api [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52c2f96d-cbfe-1ee9-4b45-edc0e25dda08, 'name': SearchDatastore_Task, 'duration_secs': 0.011527} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.755433] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 684.755433] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] d2f8328d-fd05-4e63-9cbd-a6e3ec948964/d2f8328d-fd05-4e63-9cbd-a6e3ec948964.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 684.755433] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c05bdd6e-9341-4b8e-881c-f1c81338fdcd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.765787] env[69367]: DEBUG oslo_vmware.api [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Waiting for the task: (returnval){ [ 684.765787] env[69367]: value = "task-4233854" [ 684.765787] env[69367]: _type = "Task" [ 684.765787] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.775829] env[69367]: DEBUG oslo_vmware.api [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': task-4233854, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.934732] env[69367]: DEBUG nova.compute.manager [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Start destroying the instance on the hypervisor. 
{{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 684.934977] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 684.936222] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ab26e3a-2c2e-4f4b-a68c-d94dc671b246 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.953526] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 684.953918] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b8018106-0ba1-43c7-a9bb-7bc866212da6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.964156] env[69367]: DEBUG oslo_vmware.api [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 684.964156] env[69367]: value = "task-4233855" [ 684.964156] env[69367]: _type = "Task" [ 684.964156] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.977032] env[69367]: DEBUG oslo_vmware.api [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233855, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.105587] env[69367]: DEBUG nova.compute.manager [req-df4913bb-ec04-4f66-bb36-f86459a8c092 req-d17c9cc3-96bc-43b2-8c51-1c68668083bc service nova] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Received event network-vif-plugged-5401116f-daf2-4db0-b052-7bd1adb63cc1 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 685.105587] env[69367]: DEBUG oslo_concurrency.lockutils [req-df4913bb-ec04-4f66-bb36-f86459a8c092 req-d17c9cc3-96bc-43b2-8c51-1c68668083bc service nova] Acquiring lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 685.105587] env[69367]: DEBUG oslo_concurrency.lockutils [req-df4913bb-ec04-4f66-bb36-f86459a8c092 req-d17c9cc3-96bc-43b2-8c51-1c68668083bc service nova] Lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 685.105587] env[69367]: DEBUG oslo_concurrency.lockutils [req-df4913bb-ec04-4f66-bb36-f86459a8c092 req-d17c9cc3-96bc-43b2-8c51-1c68668083bc service nova] Lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 685.105587] env[69367]: DEBUG nova.compute.manager [req-df4913bb-ec04-4f66-bb36-f86459a8c092 req-d17c9cc3-96bc-43b2-8c51-1c68668083bc service nova] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] No waiting events found dispatching network-vif-plugged-5401116f-daf2-4db0-b052-7bd1adb63cc1 {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 685.106882] env[69367]: WARNING nova.compute.manager [req-df4913bb-ec04-4f66-bb36-f86459a8c092 req-d17c9cc3-96bc-43b2-8c51-1c68668083bc service nova] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Received unexpected event network-vif-plugged-5401116f-daf2-4db0-b052-7bd1adb63cc1 for instance with vm_state building and task_state spawning. [ 685.106882] env[69367]: DEBUG nova.compute.manager [req-df4913bb-ec04-4f66-bb36-f86459a8c092 req-d17c9cc3-96bc-43b2-8c51-1c68668083bc service nova] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Received event network-changed-5401116f-daf2-4db0-b052-7bd1adb63cc1 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 685.106882] env[69367]: DEBUG nova.compute.manager [req-df4913bb-ec04-4f66-bb36-f86459a8c092 req-d17c9cc3-96bc-43b2-8c51-1c68668083bc service nova] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Refreshing instance network info cache due to event network-changed-5401116f-daf2-4db0-b052-7bd1adb63cc1. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 685.106882] env[69367]: DEBUG oslo_concurrency.lockutils [req-df4913bb-ec04-4f66-bb36-f86459a8c092 req-d17c9cc3-96bc-43b2-8c51-1c68668083bc service nova] Acquiring lock "refresh_cache-d2f8328d-fd05-4e63-9cbd-a6e3ec948964" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.106882] env[69367]: DEBUG oslo_concurrency.lockutils [req-df4913bb-ec04-4f66-bb36-f86459a8c092 req-d17c9cc3-96bc-43b2-8c51-1c68668083bc service nova] Acquired lock "refresh_cache-d2f8328d-fd05-4e63-9cbd-a6e3ec948964" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.109684] env[69367]: DEBUG nova.network.neutron [req-df4913bb-ec04-4f66-bb36-f86459a8c092 req-d17c9cc3-96bc-43b2-8c51-1c68668083bc service nova] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Refreshing network info cache for port 5401116f-daf2-4db0-b052-7bd1adb63cc1 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 685.143129] env[69367]: DEBUG oslo_vmware.api [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Task: {'id': task-4233853, 'name': PowerOnVM_Task, 'duration_secs': 1.004031} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.144831] env[69367]: DEBUG nova.scheduler.client.report [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 685.148397] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 685.148397] env[69367]: INFO nova.compute.manager [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Took 14.00 seconds to spawn the instance on the hypervisor. 
[ 685.148397] env[69367]: DEBUG nova.compute.manager [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 685.152376] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b62775f-6a34-48a7-b411-b2b9d91d4f07 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.175207] env[69367]: DEBUG nova.scheduler.client.report [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 685.178149] env[69367]: DEBUG nova.compute.provider_tree [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 685.202570] env[69367]: DEBUG nova.scheduler.client.report [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 685.240079] env[69367]: DEBUG nova.scheduler.client.report [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 685.254851] env[69367]: DEBUG oslo_concurrency.lockutils [None req-feb72e4c-b567-4182-bef0-66f7e1a58d17 tempest-ServerDiagnosticsNegativeTest-1764795038 tempest-ServerDiagnosticsNegativeTest-1764795038-project-member] Lock "c7bc6ebd-d7fd-439a-829f-8f4bf2065623" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.886s {{(pid=69367) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 685.283660] env[69367]: DEBUG oslo_vmware.api [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': task-4233854, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.375792] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "ab365570-ac29-4094-be4c-d49563a465c8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 685.376078] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "ab365570-ac29-4094-be4c-d49563a465c8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 685.376305] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "ab365570-ac29-4094-be4c-d49563a465c8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 685.376560] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "ab365570-ac29-4094-be4c-d49563a465c8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 685.376744] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "ab365570-ac29-4094-be4c-d49563a465c8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 685.379529] env[69367]: INFO nova.compute.manager [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Terminating instance [ 685.479738] env[69367]: DEBUG oslo_vmware.api [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233855, 'name': PowerOffVM_Task, 'duration_secs': 0.3775} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.480121] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 685.480233] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 685.480684] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c343551d-e555-442e-aab1-9f75967cf63d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.570271] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 685.570271] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Deleting contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 685.570501] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Deleting the datastore file [datastore1] c17525ee-d038-4c81-932b-ed74a6de6cb5 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 685.570717] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f47bbf67-504f-4528-8f3a-ab94311f807a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.578707] env[69367]: DEBUG oslo_vmware.api [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 685.578707] env[69367]: value = "task-4233858" [ 685.578707] env[69367]: _type = "Task" [ 685.578707] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.594774] env[69367]: DEBUG oslo_vmware.api [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233858, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.691612] env[69367]: INFO nova.compute.manager [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Took 42.87 seconds to build instance. [ 685.709842] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ada8470-3f36-431d-999c-2e7830f0db44 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.726734] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84fbe287-d206-4636-8ca1-13c7729b1d02 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.769316] env[69367]: DEBUG nova.compute.manager [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 685.782086] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2fa9eba-c1dd-4acc-ba2a-bc713884a98e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.794870] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60338cce-b937-4431-877d-d8f5055cd5e8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.799195] env[69367]: DEBUG oslo_vmware.api [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': task-4233854, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.649105} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.799719] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] d2f8328d-fd05-4e63-9cbd-a6e3ec948964/d2f8328d-fd05-4e63-9cbd-a6e3ec948964.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 685.799833] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 685.800577] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ce732d07-e3ae-47c5-a926-8bed0e3f4f42 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.815567] env[69367]: DEBUG nova.compute.provider_tree [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 685.823814] env[69367]: DEBUG oslo_vmware.api [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Waiting for the task: (returnval){ [ 685.823814] env[69367]: value = "task-4233859" [ 685.823814] env[69367]: _type = "Task" [ 685.823814] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.840819] env[69367]: DEBUG oslo_vmware.api [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': task-4233859, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.889977] env[69367]: DEBUG nova.compute.manager [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Start destroying the instance on the hypervisor. 
{{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 685.890275] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 685.891220] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d0ba5f6-aeff-4f3b-9de5-ca8250623489 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.902538] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 685.902850] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-488a1a75-561c-4ba5-a913-8dc2e156235c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.912264] env[69367]: DEBUG oslo_vmware.api [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 685.912264] env[69367]: value = "task-4233860" [ 685.912264] env[69367]: _type = "Task" [ 685.912264] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.925564] env[69367]: DEBUG oslo_vmware.api [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233860, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.093721] env[69367]: DEBUG oslo_vmware.api [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233858, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186558} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.094186] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 686.097256] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Deleted contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 686.097256] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 686.097256] env[69367]: INFO nova.compute.manager [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Took 1.16 seconds to destroy the instance on the hypervisor. [ 686.097256] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 686.097256] env[69367]: DEBUG nova.compute.manager [-] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 686.097256] env[69367]: DEBUG nova.network.neutron [-] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 686.123378] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.198657] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be4e3c2d-b3b5-427c-9eea-12d64490087d tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Lock "92c27615-d377-492f-a9db-ff45b2e71537" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.014s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.279873] env[69367]: DEBUG nova.network.neutron [req-df4913bb-ec04-4f66-bb36-f86459a8c092 req-d17c9cc3-96bc-43b2-8c51-1c68668083bc service nova] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Updated VIF entry in instance network info cache for port 5401116f-daf2-4db0-b052-7bd1adb63cc1. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 686.280261] env[69367]: DEBUG nova.network.neutron [req-df4913bb-ec04-4f66-bb36-f86459a8c092 req-d17c9cc3-96bc-43b2-8c51-1c68668083bc service nova] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Updating instance_info_cache with network_info: [{"id": "5401116f-daf2-4db0-b052-7bd1adb63cc1", "address": "fa:16:3e:d1:24:7a", "network": {"id": "341b1aa4-f821-4feb-8230-bd176112b3c3", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2139182269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73aa45fc35b44c6a8a2dd8ef127b974", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5401116f-da", "ovs_interfaceid": "5401116f-daf2-4db0-b052-7bd1adb63cc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.295921] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.342311] env[69367]: DEBUG oslo_vmware.api [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': task-4233859, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.099463} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.343284] env[69367]: ERROR nova.scheduler.client.report [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [req-781fed81-3f27-48aa-bd39-e69ef204c19c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-781fed81-3f27-48aa-bd39-e69ef204c19c"}]} [ 686.343651] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.250s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.345690] env[69367]: ERROR nova.compute.manager [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 686.345690] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] Traceback (most recent call last): [ 686.345690] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 686.345690] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] yield [ 686.345690] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 686.345690] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] self.set_inventory_for_provider( [ 686.345690] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 686.345690] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 686.347044] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-781fed81-3f27-48aa-bd39-e69ef204c19c"}]} [ 686.347044] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] [ 686.347044] env[69367]: ERROR nova.compute.manager 
[instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] During handling of the above exception, another exception occurred: [ 686.347044] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] [ 686.347044] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] Traceback (most recent call last): [ 686.347044] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 686.347044] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] with self.rt.instance_claim(context, instance, node, allocs, [ 686.347044] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 686.347044] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] return f(*args, **kwargs) [ 686.347316] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 686.347316] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] self._update(elevated, cn) [ 686.347316] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 686.347316] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] self._update_to_placement(context, compute_node, startup) [ 686.347316] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 686.347316] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 686.347316] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 686.347316] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] return attempt.get(self._wrap_exception) [ 686.347316] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 686.347316] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] six.reraise(self.value[0], self.value[1], self.value[2]) [ 686.347316] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 686.347316] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] raise value [ 686.347316] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 686.347624] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 686.347624] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 
1390, in _update_to_placement [ 686.347624] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] self.reportclient.update_from_provider_tree( [ 686.347624] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 686.347624] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] with catch_all(pd.uuid): [ 686.347624] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 686.347624] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] self.gen.throw(typ, value, traceback) [ 686.347624] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 686.347624] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] raise exception.ResourceProviderSyncFailed() [ 686.347624] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 686.347624] env[69367]: ERROR nova.compute.manager [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] [ 686.347886] env[69367]: DEBUG nova.compute.utils [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 686.347886] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 686.347886] env[69367]: DEBUG oslo_concurrency.lockutils [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.729s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.347886] env[69367]: INFO nova.compute.claims [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 686.351160] env[69367]: DEBUG nova.compute.manager [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] Build of instance 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 686.353213] env[69367]: DEBUG nova.compute.manager [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 686.353213] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Acquiring lock "refresh_cache-5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.353213] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Acquired lock "refresh_cache-5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 686.353213] env[69367]: DEBUG nova.network.neutron [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 686.353711] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03fddf6f-07fd-4ae6-be42-084a35f66ec0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.385654] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] d2f8328d-fd05-4e63-9cbd-a6e3ec948964/d2f8328d-fd05-4e63-9cbd-a6e3ec948964.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 686.386336] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85bf2eb1-782b-4f95-83a6-a853cac777fd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.408828] env[69367]: DEBUG oslo_vmware.api [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Waiting for the task: (returnval){ [ 686.408828] env[69367]: value = "task-4233861" [ 686.408828] env[69367]: _type = "Task" [ 686.408828] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.421382] env[69367]: DEBUG oslo_vmware.api [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': task-4233861, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.430103] env[69367]: DEBUG oslo_vmware.api [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233860, 'name': PowerOffVM_Task, 'duration_secs': 0.278096} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.430812] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 686.433190] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 686.433190] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c669a297-7859-49b9-a2d5-a4bbeb1aaa8a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.501716] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 686.501974] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Deleting contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 686.502195] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Deleting the datastore file [datastore1] ab365570-ac29-4094-be4c-d49563a465c8 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 686.502463] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3b8e0944-29ea-4408-aaf4-ff3a9a5a1364 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.513791] env[69367]: DEBUG oslo_vmware.api [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 686.513791] env[69367]: value = "task-4233863" [ 686.513791] env[69367]: _type = "Task" [ 686.513791] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.523220] env[69367]: DEBUG oslo_vmware.api [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233863, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.713298] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Acquiring lock "92c27615-d377-492f-a9db-ff45b2e71537" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.713699] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Lock "92c27615-d377-492f-a9db-ff45b2e71537" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.713745] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Acquiring lock "92c27615-d377-492f-a9db-ff45b2e71537-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.713914] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Lock "92c27615-d377-492f-a9db-ff45b2e71537-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.714103] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Lock "92c27615-d377-492f-a9db-ff45b2e71537-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.715883] env[69367]: DEBUG nova.compute.manager [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 686.719145] env[69367]: INFO nova.compute.manager [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Terminating instance [ 686.783564] env[69367]: DEBUG oslo_concurrency.lockutils [req-df4913bb-ec04-4f66-bb36-f86459a8c092 req-d17c9cc3-96bc-43b2-8c51-1c68668083bc service nova] Releasing lock "refresh_cache-d2f8328d-fd05-4e63-9cbd-a6e3ec948964" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 686.783824] env[69367]: DEBUG nova.compute.manager [req-df4913bb-ec04-4f66-bb36-f86459a8c092 req-d17c9cc3-96bc-43b2-8c51-1c68668083bc service nova] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Received event network-vif-deleted-40ba702d-0ae2-48ae-acc0-37f002e4ef6a {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 686.899210] env[69367]: DEBUG nova.network.neutron [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 686.920613] env[69367]: DEBUG oslo_vmware.api [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': task-4233861, 'name': ReconfigVM_Task, 'duration_secs': 0.348293} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.920954] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Reconfigured VM instance instance-00000018 to attach disk [datastore1] d2f8328d-fd05-4e63-9cbd-a6e3ec948964/d2f8328d-fd05-4e63-9cbd-a6e3ec948964.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 686.921714] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cb3fb7dd-59cb-4f14-b5ec-1282a26c01d7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.934902] env[69367]: DEBUG oslo_vmware.api [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Waiting for the task: (returnval){ [ 686.934902] env[69367]: value = "task-4233864" [ 686.934902] env[69367]: _type = "Task" [ 686.934902] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.949311] env[69367]: DEBUG oslo_vmware.api [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': task-4233864, 'name': Rename_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.027418] env[69367]: DEBUG oslo_vmware.api [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233863, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16851} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.030026] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 687.030026] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Deleted contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 687.030026] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 687.030026] env[69367]: INFO nova.compute.manager [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Took 1.14 seconds to destroy the instance on the hypervisor. [ 687.030026] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 687.030588] env[69367]: DEBUG nova.network.neutron [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.031590] env[69367]: DEBUG nova.compute.manager [-] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 687.031692] env[69367]: DEBUG nova.network.neutron [-] [instance: ab365570-ac29-4094-be4c-d49563a465c8] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 687.230745] env[69367]: DEBUG nova.compute.manager [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Start destroying the instance on the hypervisor. 
{{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 687.230979] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 687.232081] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c3f5787-1601-402b-8798-eb8d66344e1d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.244979] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 687.245331] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-33bca3ab-f25d-47af-8cd8-bf27c9901a6e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.248087] env[69367]: DEBUG oslo_concurrency.lockutils [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.254393] env[69367]: DEBUG oslo_vmware.api [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Waiting for the task: (returnval){ [ 687.254393] env[69367]: value = "task-4233865" [ 687.254393] env[69367]: _type = "Task" [ 687.254393] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.264966] env[69367]: DEBUG oslo_vmware.api [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Task: {'id': task-4233865, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.276065] env[69367]: DEBUG nova.network.neutron [-] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.380050] env[69367]: DEBUG nova.scheduler.client.report [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 687.403272] env[69367]: DEBUG nova.scheduler.client.report [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 687.403918] env[69367]: DEBUG nova.compute.provider_tree [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 687.429333] env[69367]: DEBUG nova.scheduler.client.report [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 687.452276] env[69367]: DEBUG oslo_vmware.api [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': task-4233864, 'name': Rename_Task, 'duration_secs': 0.166387} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.452276] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 687.452668] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cad4dd62-2361-4588-aeda-e3144f9ef26d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.463512] env[69367]: DEBUG oslo_vmware.api [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Waiting for the task: (returnval){ [ 687.463512] env[69367]: value = "task-4233866" [ 687.463512] env[69367]: _type = "Task" [ 687.463512] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.469091] env[69367]: DEBUG nova.scheduler.client.report [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 687.481011] env[69367]: DEBUG oslo_vmware.api [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': task-4233866, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.535747] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Releasing lock "refresh_cache-5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.536860] env[69367]: DEBUG nova.compute.manager [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 687.536860] env[69367]: DEBUG nova.compute.manager [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 687.536860] env[69367]: DEBUG nova.network.neutron [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 687.597357] env[69367]: DEBUG nova.network.neutron [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 687.769069] env[69367]: DEBUG oslo_vmware.api [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Task: {'id': task-4233865, 'name': PowerOffVM_Task, 'duration_secs': 0.224834} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.769591] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 687.770879] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 687.771198] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3eeee938-eb4c-4e21-bad0-8c202d129b25 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.778431] env[69367]: INFO nova.compute.manager [-] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Took 1.68 seconds to deallocate network for instance. 
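Note on the 400 responses above: the error text quotes the placement inventory schema fragment that rejects the update. max_unit must be an integer between 1 and 2147483647, while the compute host is reporting DISK_GB with max_unit 0. The snippet below is a minimal local reproduction of that validation with the jsonschema package; the max_unit constraints are copied from the error text, and the surrounding schema structure is a simplified stand-in for illustration, not placement's full schema.

    # Minimal reproduction of the placement-side check that produces the
    # "JSON does not validate: 0 is less than the minimum of 1" error above.
    # The max_unit constraints are copied from the error text; the rest of
    # the schema is a simplified stand-in, not placement's full schema.
    import jsonschema

    INVENTORY_SCHEMA = {
        "type": "object",
        "properties": {
            "inventories": {
                "type": "object",
                "patternProperties": {
                    "^[A-Z0-9_]+$": {
                        "type": "object",
                        "properties": {
                            "total": {"type": "integer"},
                            "max_unit": {
                                "type": "integer",
                                "minimum": 1,
                                "maximum": 2147483647,
                            },
                        },
                    },
                },
            },
        },
    }

    payload = {
        "inventories": {
            "DISK_GB": {"total": 400, "max_unit": 0},  # max_unit 0 is invalid
        },
    }

    try:
        jsonschema.validate(payload, INVENTORY_SCHEMA)
    except jsonschema.ValidationError as exc:
        # Prints: 0 is less than the minimum of 1
        print(exc.message)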
[ 687.941831] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24d27890-4a0c-42ba-819c-fd4157a106ef {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.953939] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-535d3854-2ba1-43cd-acfa-14327363f3c6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.995286] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bb959e5-f0cd-41a5-95bd-50bba7d18408 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.011295] env[69367]: DEBUG oslo_vmware.api [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': task-4233866, 'name': PowerOnVM_Task, 'duration_secs': 0.483332} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.011644] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 688.011850] env[69367]: INFO nova.compute.manager [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Took 11.15 seconds to spawn the instance on the hypervisor. 
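The repeated "Waiting for the task ... to complete" and "progress is N%" entries come from polling vCenter task objects until they reach a terminal state. As a hedged illustration only, the loop looks roughly like the sketch below; fetch_task_info is a made-up helper standing in for a vCenter TaskInfo lookup and is not oslo.vmware's real interface.

    # Hypothetical sketch of a task-polling loop like the one behind the
    # "Waiting for the task ... to complete" / "progress is N%" entries.
    # fetch_task_info() is a made-up helper, NOT oslo.vmware's real API.
    import time

    def wait_for_task(task_id, fetch_task_info, interval=0.5, timeout=300):
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info(task_id)  # e.g. {'state': 'running', 'progress': 6}
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                raise RuntimeError(f"task {task_id} failed: {info.get('error')}")
            time.sleep(interval)
        raise TimeoutError(f"task {task_id} did not complete within {timeout}s")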
[ 688.012089] env[69367]: DEBUG nova.compute.manager [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 688.013391] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc6f9218-7981-4b84-b3c6-17dd5ceab774 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.018965] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7aaecf4-a159-4b99-92d6-b26aef9ea5d1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.033992] env[69367]: DEBUG nova.compute.provider_tree [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 688.055133] env[69367]: DEBUG nova.compute.manager [req-299f1d16-409c-43cb-9367-aa92e2ec280b req-51d27c46-da18-45eb-a087-c8983feb8054 service nova] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Received event network-vif-deleted-a1308824-de04-4736-b17a-57bf0eae53ff {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 688.084423] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 688.084757] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Deleting contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 688.084991] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Deleting the datastore file [datastore1] 92c27615-d377-492f-a9db-ff45b2e71537 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 688.085272] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d2ade70-7ebb-45aa-b851-d047a37504db {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.095380] env[69367]: DEBUG oslo_vmware.api [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Waiting for the 
task: (returnval){ [ 688.095380] env[69367]: value = "task-4233869" [ 688.095380] env[69367]: _type = "Task" [ 688.095380] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.103391] env[69367]: DEBUG nova.network.neutron [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.115271] env[69367]: DEBUG oslo_vmware.api [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Task: {'id': task-4233869, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.226120] env[69367]: DEBUG oslo_concurrency.lockutils [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] Acquiring lock "d785944c-d65e-4a9e-91e5-0c0911b25227" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 688.226378] env[69367]: DEBUG oslo_concurrency.lockutils [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] Lock "d785944c-d65e-4a9e-91e5-0c0911b25227" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 688.244463] env[69367]: DEBUG nova.network.neutron [-] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.287668] env[69367]: DEBUG oslo_concurrency.lockutils [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 688.554174] env[69367]: INFO nova.compute.manager [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Took 43.87 seconds to build instance. 
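Note that the local ProviderTree update above reports DISK_GB with max_unit 0 while the copy refreshed from placement still carries max_unit 1; the PUT of the locally computed value is what fails next. A defensive sketch of a possible mitigation is shown below; it is an illustrative assumption, not Nova's actual driver code, and simply clamps the reported unit into the range placement accepts before the payload is built.

    # Hypothetical guard, not Nova's actual driver code: clamp per-resource
    # max_unit into the range the placement schema accepts (1..2147483647)
    # before the inventory is pushed, so a computed 0 cannot 400 the PUT.
    PLACEMENT_MAX_INT = 2147483647

    def clamp_inventory(inventories):
        clamped = {}
        for rc, inv in inventories.items():
            inv = dict(inv)
            inv["max_unit"] = min(max(inv.get("max_unit", 1), 1), PLACEMENT_MAX_INT)
            clamped[rc] = inv
        return clamped

    print(clamp_inventory({"DISK_GB": {"total": 400, "max_unit": 0}}))
    # {'DISK_GB': {'total': 400, 'max_unit': 1}}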
[ 688.571502] env[69367]: ERROR nova.scheduler.client.report [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [req-0ee9d928-8733-4bb3-96a0-bf4e44cbe64b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-0ee9d928-8733-4bb3-96a0-bf4e44cbe64b"}]} [ 688.571911] env[69367]: DEBUG oslo_concurrency.lockutils [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.225s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 688.572513] env[69367]: ERROR nova.compute.manager [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 688.572513] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] Traceback (most recent call last): [ 688.572513] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 688.572513] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] yield [ 688.572513] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 688.572513] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] self.set_inventory_for_provider( [ 688.572513] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 688.572513] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 688.572721] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-0ee9d928-8733-4bb3-96a0-bf4e44cbe64b"}]} [ 688.572721] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] [ 688.572721] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] During handling of the above exception, another exception occurred: [ 688.572721] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] [ 688.572721] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] Traceback (most recent call last): [ 688.572721] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 688.572721] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] with self.rt.instance_claim(context, instance, node, allocs, [ 688.572721] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 688.572721] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] return f(*args, **kwargs) [ 688.573011] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 688.573011] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] self._update(elevated, cn) [ 688.573011] env[69367]: ERROR nova.compute.manager [instance: 
27267edf-97f5-4238-8d9a-c2ddf0bb252c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 688.573011] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] self._update_to_placement(context, compute_node, startup) [ 688.573011] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 688.573011] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 688.573011] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 688.573011] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] return attempt.get(self._wrap_exception) [ 688.573011] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 688.573011] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] six.reraise(self.value[0], self.value[1], self.value[2]) [ 688.573011] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 688.573011] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] raise value [ 688.573011] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 688.573352] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 688.573352] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 688.573352] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] self.reportclient.update_from_provider_tree( [ 688.573352] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 688.573352] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] with catch_all(pd.uuid): [ 688.573352] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 688.573352] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] self.gen.throw(typ, value, traceback) [ 688.573352] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 688.573352] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] raise exception.ResourceProviderSyncFailed() [ 688.573352] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 688.573352] env[69367]: ERROR nova.compute.manager [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] [ 688.573596] env[69367]: DEBUG nova.compute.utils [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 688.578393] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.034s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 688.578393] env[69367]: INFO nova.compute.claims [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 688.581665] env[69367]: DEBUG nova.compute.manager [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] Build of instance 27267edf-97f5-4238-8d9a-c2ddf0bb252c was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 688.582208] env[69367]: DEBUG nova.compute.manager [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 688.582442] env[69367]: DEBUG oslo_concurrency.lockutils [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Acquiring lock "refresh_cache-27267edf-97f5-4238-8d9a-c2ddf0bb252c" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.582588] env[69367]: DEBUG oslo_concurrency.lockutils [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Acquired lock "refresh_cache-27267edf-97f5-4238-8d9a-c2ddf0bb252c" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 688.582744] env[69367]: DEBUG nova.network.neutron [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 688.615898] env[69367]: INFO nova.compute.manager [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd] Took 1.08 seconds to deallocate 
network for instance. [ 688.620899] env[69367]: DEBUG oslo_vmware.api [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Task: {'id': task-4233869, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.2096} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.621163] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 688.621671] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Deleted contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 688.621727] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 688.622070] env[69367]: INFO nova.compute.manager [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Took 1.39 seconds to destroy the instance on the hypervisor. [ 688.625982] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 688.625982] env[69367]: DEBUG nova.compute.manager [-] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 688.626183] env[69367]: DEBUG nova.network.neutron [-] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 688.749031] env[69367]: INFO nova.compute.manager [-] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Took 1.72 seconds to deallocate network for instance. 
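The tracebacks above pass through retrying.py (wrapped_f -> Retrying.call -> reraise), i.e. the placement update is wrapped in a retry decorator and the final exception is re-raised to the caller. The sketch below shows the general shape of such a wrapper with the retrying library; the exception type and attempt count are illustrative assumptions, not Nova's actual settings.

    # Sketch of the kind of retry wrapper visible in the tracebacks above.
    # The exception type and attempt count are illustrative assumptions.
    from retrying import retry

    class Conflict(Exception):
        """Stand-in for a retryable placement error (e.g. a generation conflict)."""

    @retry(retry_on_exception=lambda exc: isinstance(exc, Conflict),
           stop_max_attempt_number=4)
    def update_to_placement():
        # Push the provider tree to placement; raise Conflict to trigger a
        # retry. Any other exception propagates to the caller, as seen with
        # ResourceProviderUpdateFailed above.
        ...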
[ 689.059731] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1a0ea38d-dfc4-4cbb-a9f0-f5ce46f1788e tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.874s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 689.117307] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Acquiring lock "de68478e-475a-45ef-9eed-44904fcfc1fd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.117703] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Lock "de68478e-475a-45ef-9eed-44904fcfc1fd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.120267] env[69367]: DEBUG nova.network.neutron [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 689.242908] env[69367]: DEBUG nova.network.neutron [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.259126] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.562800] env[69367]: DEBUG nova.compute.manager [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 689.620406] env[69367]: DEBUG nova.scheduler.client.report [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 689.641218] env[69367]: DEBUG nova.scheduler.client.report [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 689.642170] env[69367]: DEBUG nova.compute.provider_tree [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 689.655303] env[69367]: INFO nova.scheduler.client.report [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Deleted allocations for instance 5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd [ 689.673734] env[69367]: DEBUG nova.scheduler.client.report [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 689.701552] env[69367]: DEBUG nova.scheduler.client.report [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 689.744748] env[69367]: DEBUG oslo_concurrency.lockutils [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Releasing lock "refresh_cache-27267edf-97f5-4238-8d9a-c2ddf0bb252c" {{(pid=69367) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.746172] env[69367]: DEBUG nova.compute.manager [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 689.746360] env[69367]: DEBUG nova.compute.manager [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 689.746536] env[69367]: DEBUG nova.network.neutron [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 689.792916] env[69367]: DEBUG nova.network.neutron [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 689.796227] env[69367]: DEBUG nova.network.neutron [-] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.036547] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Acquiring lock "011ab7de-98a7-41fc-9e05-e71965c73c09" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 690.037009] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Lock "011ab7de-98a7-41fc-9e05-e71965c73c09" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 690.091754] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 690.177836] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6de81446-9515-41d1-9d8b-68fdd2bfc9f0 tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Lock "5aac4b74-0c19-43e2-a2ce-5bbb6c731dcd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.996s 
{{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 690.178899] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d4b1c58-9b72-473d-9eef-5f2b077b933b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.188474] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bee62f5-b994-4207-a87f-f44ef6528839 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.226293] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65898493-85c2-448b-a7f4-9f117d9142be {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.235929] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd7791a-1829-473f-bde3-1e167eb80740 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.254079] env[69367]: DEBUG nova.compute.provider_tree [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 690.296559] env[69367]: DEBUG nova.network.neutron [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.298026] env[69367]: INFO nova.compute.manager [-] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Took 1.67 seconds to deallocate network for instance. [ 690.615442] env[69367]: DEBUG nova.compute.manager [req-0104d05e-5d71-42d7-b72f-59f7dc6a7f8d req-435c220a-7675-4fb2-9b9d-2d08f80b9150 service nova] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Received event network-vif-deleted-6de8ad4d-1ee6-4190-bcaa-941184f740e1 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 690.616224] env[69367]: DEBUG nova.compute.manager [req-0104d05e-5d71-42d7-b72f-59f7dc6a7f8d req-435c220a-7675-4fb2-9b9d-2d08f80b9150 service nova] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Received event network-vif-deleted-495ec91b-986e-4b74-820f-28ae7f03a86a {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 690.684863] env[69367]: DEBUG nova.compute.manager [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 690.775598] env[69367]: ERROR nova.scheduler.client.report [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [req-efac3fbf-7016-4980-a5ab-dafe7e02d101] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-efac3fbf-7016-4980-a5ab-dafe7e02d101"}]} [ 690.775992] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.200s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 690.776603] env[69367]: ERROR nova.compute.manager [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 690.776603] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] Traceback (most recent call last): [ 690.776603] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 690.776603] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] yield [ 690.776603] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 690.776603] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] self.set_inventory_for_provider( [ 690.776603] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 690.776603] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 690.776897] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-efac3fbf-7016-4980-a5ab-dafe7e02d101"}]} [ 690.776897] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] [ 690.776897] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] During handling of the above exception, another exception occurred: [ 690.776897] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] [ 690.776897] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] Traceback (most recent call last): [ 690.776897] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 690.776897] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] with self.rt.instance_claim(context, instance, node, allocs, [ 690.776897] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 690.776897] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] return f(*args, **kwargs) [ 690.777300] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 690.777300] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] self._update(elevated, cn) [ 690.777300] env[69367]: ERROR nova.compute.manager [instance: 
65fcdf23-421a-45c1-880e-a536ec9fbdfd] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 690.777300] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] self._update_to_placement(context, compute_node, startup) [ 690.777300] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 690.777300] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 690.777300] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 690.777300] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] return attempt.get(self._wrap_exception) [ 690.777300] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 690.777300] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] six.reraise(self.value[0], self.value[1], self.value[2]) [ 690.777300] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 690.777300] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] raise value [ 690.777300] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 690.777733] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 690.777733] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 690.777733] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] self.reportclient.update_from_provider_tree( [ 690.777733] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 690.777733] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] with catch_all(pd.uuid): [ 690.777733] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 690.777733] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] self.gen.throw(typ, value, traceback) [ 690.777733] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 690.777733] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] raise exception.ResourceProviderSyncFailed() [ 690.777733] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 690.777733] env[69367]: ERROR nova.compute.manager [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] [ 690.778068] env[69367]: DEBUG nova.compute.utils [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 690.778993] env[69367]: DEBUG nova.compute.manager [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] Build of instance 65fcdf23-421a-45c1-880e-a536ec9fbdfd was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 690.779431] env[69367]: DEBUG nova.compute.manager [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 690.779656] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Acquiring lock "refresh_cache-65fcdf23-421a-45c1-880e-a536ec9fbdfd" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.779808] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Acquired lock "refresh_cache-65fcdf23-421a-45c1-880e-a536ec9fbdfd" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 690.779974] env[69367]: DEBUG nova.network.neutron [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 690.782043] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.368s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 690.783810] env[69367]: INFO nova.compute.claims [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 690.800600] env[69367]: INFO nova.compute.manager [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] [instance: 27267edf-97f5-4238-8d9a-c2ddf0bb252c] Took 1.05 seconds to deallocate network for instance. 
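The 400 in the traceback above is Placement's JSON-Schema validation rejecting the inventory update: the compute host is reporting DISK_GB max_unit = 0, while the schema fragment quoted in the error requires an integer between 1 and 2147483647. A self-contained sketch (not Placement's actual schema file, only the fragment quoted in the error) that reproduces the same validation failure with the jsonschema library:

import jsonschema

# Only the piece of the inventories schema that the error message quotes.
SCHEMA_FRAGMENT = {
    "type": "object",
    "properties": {
        "inventories": {
            "type": "object",
            "patternProperties": {
                "^[A-Z0-9_]+$": {
                    "type": "object",
                    "properties": {
                        "max_unit": {
                            "type": "integer",
                            "minimum": 1,           # violated by the 0 below
                            "maximum": 2147483647,
                        },
                    },
                },
            },
        },
    },
}

# The DISK_GB inventory the resource tracker tried to PUT, as logged above.
payload = {
    "inventories": {
        "DISK_GB": {
            "total": 400, "reserved": 0, "min_unit": 1,
            "max_unit": 0, "step_size": 1, "allocation_ratio": 1.0,
        },
    },
}

try:
    jsonschema.validate(payload, SCHEMA_FRAGMENT)
except jsonschema.ValidationError as exc:
    print(exc.message)   # "0 is less than the minimum of 1"

Until the host stops reporting a zero max_unit for DISK_GB, every instance_claim hits this same 400, update_from_provider_tree raises ResourceProviderSyncFailed, and the build is re-scheduled, which is the loop repeated throughout the rest of this log.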
[ 690.809344] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.135048] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 691.137119] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 691.228451] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.304025] env[69367]: DEBUG nova.network.neutron [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 691.407331] env[69367]: DEBUG nova.network.neutron [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.649397] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 691.649397] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 691.649397] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 691.649397] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 691.650274] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69367) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 691.650274] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 691.650274] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69367) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11183}} [ 691.650274] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 691.775018] env[69367]: DEBUG oslo_concurrency.lockutils [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Acquiring lock "f8c07fa1-d27c-4d0f-847b-481477cd04bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.775018] env[69367]: DEBUG oslo_concurrency.lockutils [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Lock "f8c07fa1-d27c-4d0f-847b-481477cd04bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 691.819311] env[69367]: DEBUG nova.scheduler.client.report [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 691.845235] env[69367]: DEBUG nova.scheduler.client.report [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 691.845479] env[69367]: DEBUG nova.compute.provider_tree [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 691.848823] env[69367]: INFO nova.scheduler.client.report [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Deleted allocations for instance 27267edf-97f5-4238-8d9a-c2ddf0bb252c [ 691.860781] env[69367]: DEBUG nova.scheduler.client.report [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 691.887027] env[69367]: DEBUG nova.scheduler.client.report [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 691.911383] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Releasing lock "refresh_cache-65fcdf23-421a-45c1-880e-a536ec9fbdfd" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 691.911383] env[69367]: DEBUG nova.compute.manager [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 691.911383] env[69367]: DEBUG nova.compute.manager [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 691.911383] env[69367]: DEBUG nova.network.neutron [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 691.951253] env[69367]: DEBUG nova.network.neutron [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 692.157774] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 692.230585] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1d63d810-5e66-402c-9d42-c4dffd33b19e tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Acquiring lock "5c7b2127-e875-4222-8148-a2ea60631c25" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 692.230585] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1d63d810-5e66-402c-9d42-c4dffd33b19e tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Lock "5c7b2127-e875-4222-8148-a2ea60631c25" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 692.230585] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1d63d810-5e66-402c-9d42-c4dffd33b19e tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Acquiring lock "5c7b2127-e875-4222-8148-a2ea60631c25-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 692.230762] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1d63d810-5e66-402c-9d42-c4dffd33b19e tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Lock "5c7b2127-e875-4222-8148-a2ea60631c25-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 692.230970] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1d63d810-5e66-402c-9d42-c4dffd33b19e tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Lock "5c7b2127-e875-4222-8148-a2ea60631c25-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.234038] env[69367]: INFO nova.compute.manager [None req-1d63d810-5e66-402c-9d42-c4dffd33b19e tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Terminating instance [ 692.324151] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7b5fc6a-a31f-4fa5-90b9-28083151405e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.332939] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac65bbf-115d-4cb3-8cbb-21351603fad3 {{(pid=69367) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.369893] env[69367]: DEBUG oslo_concurrency.lockutils [None req-aabb4e8c-4072-4fce-bdec-9388128f54ff tempest-MigrationsAdminTest-1077393646 tempest-MigrationsAdminTest-1077393646-project-member] Lock "27267edf-97f5-4238-8d9a-c2ddf0bb252c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.234s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.371792] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9f4da89-78ae-45e3-aa0d-7fdcc3a682e1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.381692] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca315768-20d2-41cf-9c62-b3e2a0c94133 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.398208] env[69367]: DEBUG nova.compute.provider_tree [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 692.454502] env[69367]: DEBUG nova.network.neutron [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.742933] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1d63d810-5e66-402c-9d42-c4dffd33b19e tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Acquiring lock "refresh_cache-5c7b2127-e875-4222-8148-a2ea60631c25" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.744083] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1d63d810-5e66-402c-9d42-c4dffd33b19e tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Acquired lock "refresh_cache-5c7b2127-e875-4222-8148-a2ea60631c25" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 692.746687] env[69367]: DEBUG nova.network.neutron [None req-1d63d810-5e66-402c-9d42-c4dffd33b19e tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 692.878514] env[69367]: DEBUG nova.compute.manager [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 
tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 692.935803] env[69367]: ERROR nova.scheduler.client.report [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [req-0b6053ec-7b95-40ff-8014-750762c62ab8] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-0b6053ec-7b95-40ff-8014-750762c62ab8"}]} [ 692.936022] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.154s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.936559] env[69367]: ERROR nova.compute.manager [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 692.936559] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] Traceback (most recent call last): [ 692.936559] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 692.936559] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] yield [ 692.936559] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 692.936559] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] self.set_inventory_for_provider( [ 692.936559] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 692.936559] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 692.936807] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-0b6053ec-7b95-40ff-8014-750762c62ab8"}]} [ 692.936807] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] [ 692.936807] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] During handling of the above exception, another exception occurred: [ 692.936807] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] [ 692.936807] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] Traceback (most recent call last): [ 692.936807] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 692.936807] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] with self.rt.instance_claim(context, instance, node, allocs, [ 692.936807] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 692.936807] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] return f(*args, **kwargs) [ 692.937055] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 692.937055] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] self._update(elevated, cn) [ 692.937055] env[69367]: ERROR nova.compute.manager [instance: 
171efb4b-7da6-4db3-88db-c36a9d04f872] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 692.937055] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] self._update_to_placement(context, compute_node, startup) [ 692.937055] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 692.937055] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 692.937055] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 692.937055] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] return attempt.get(self._wrap_exception) [ 692.937055] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 692.937055] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] six.reraise(self.value[0], self.value[1], self.value[2]) [ 692.937055] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 692.937055] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] raise value [ 692.937055] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 692.937359] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 692.937359] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 692.937359] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] self.reportclient.update_from_provider_tree( [ 692.937359] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 692.937359] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] with catch_all(pd.uuid): [ 692.937359] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 692.937359] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] self.gen.throw(typ, value, traceback) [ 692.937359] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 692.937359] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] raise exception.ResourceProviderSyncFailed() [ 692.937359] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 692.937359] env[69367]: ERROR nova.compute.manager [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] [ 692.937618] env[69367]: DEBUG nova.compute.utils [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 692.938779] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.713s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 692.939022] env[69367]: DEBUG nova.objects.instance [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Lazy-loading 'resources' on Instance uuid 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 692.943339] env[69367]: DEBUG nova.compute.manager [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] Build of instance 171efb4b-7da6-4db3-88db-c36a9d04f872 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 692.943339] env[69367]: DEBUG nova.compute.manager [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 692.943339] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "refresh_cache-171efb4b-7da6-4db3-88db-c36a9d04f872" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.943339] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquired lock "refresh_cache-171efb4b-7da6-4db3-88db-c36a9d04f872" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 692.943339] env[69367]: DEBUG nova.network.neutron [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 692.958579] env[69367]: INFO nova.compute.manager [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 65fcdf23-421a-45c1-880e-a536ec9fbdfd] Took 1.05 seconds to deallocate network for instance. 
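A quick way to see what Placement currently stores for this provider is to read back the same inventories URL the PUT keeps failing against. A hypothetical check, assuming a placement endpoint in PLACEMENT_URL and a Keystone token in OS_TOKEN (both placeholders); the provider UUID and URL path are the ones from the failing requests above:

import os

import requests

PLACEMENT_URL = os.environ["PLACEMENT_URL"]   # placement endpoint from the service catalog
TOKEN = os.environ["OS_TOKEN"]                # e.g. obtained via `openstack token issue`

resp = requests.get(
    f"{PLACEMENT_URL}/resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories",
    headers={"X-Auth-Token": TOKEN, "Accept": "application/json"},
    timeout=10,
)
resp.raise_for_status()
# GET .../inventories returns {"resource_provider_generation": N, "inventories": {...}}.
print(resp.json()["inventories"]["DISK_GB"])

As of the refreshes logged above, the stored DISK_GB inventory still has max_unit: 1 while the resource tracker keeps computing 0 locally, so the stored and reported views never converge and the claim keeps failing.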
[ 693.010040] env[69367]: DEBUG nova.compute.manager [req-7a435e35-d78c-44f0-9d5c-eaaffbc218ac req-22f7527a-5e20-479a-ac00-bb74ed0f5199 service nova] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Received event network-changed-5401116f-daf2-4db0-b052-7bd1adb63cc1 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 693.010211] env[69367]: DEBUG nova.compute.manager [req-7a435e35-d78c-44f0-9d5c-eaaffbc218ac req-22f7527a-5e20-479a-ac00-bb74ed0f5199 service nova] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Refreshing instance network info cache due to event network-changed-5401116f-daf2-4db0-b052-7bd1adb63cc1. {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 693.010429] env[69367]: DEBUG oslo_concurrency.lockutils [req-7a435e35-d78c-44f0-9d5c-eaaffbc218ac req-22f7527a-5e20-479a-ac00-bb74ed0f5199 service nova] Acquiring lock "refresh_cache-d2f8328d-fd05-4e63-9cbd-a6e3ec948964" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.010575] env[69367]: DEBUG oslo_concurrency.lockutils [req-7a435e35-d78c-44f0-9d5c-eaaffbc218ac req-22f7527a-5e20-479a-ac00-bb74ed0f5199 service nova] Acquired lock "refresh_cache-d2f8328d-fd05-4e63-9cbd-a6e3ec948964" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 693.010738] env[69367]: DEBUG nova.network.neutron [req-7a435e35-d78c-44f0-9d5c-eaaffbc218ac req-22f7527a-5e20-479a-ac00-bb74ed0f5199 service nova] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Refreshing network info cache for port 5401116f-daf2-4db0-b052-7bd1adb63cc1 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 693.277507] env[69367]: DEBUG nova.network.neutron [None req-1d63d810-5e66-402c-9d42-c4dffd33b19e tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.359520] env[69367]: DEBUG nova.network.neutron [None req-1d63d810-5e66-402c-9d42-c4dffd33b19e tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.408244] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.465175] env[69367]: DEBUG nova.scheduler.client.report [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 693.475887] env[69367]: DEBUG nova.network.neutron [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.490412] env[69367]: DEBUG nova.scheduler.client.report [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 693.490758] env[69367]: DEBUG nova.compute.provider_tree [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 693.511620] env[69367]: DEBUG nova.scheduler.client.report [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:831}} [ 693.542256] env[69367]: DEBUG nova.scheduler.client.report [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 693.641648] env[69367]: DEBUG nova.network.neutron [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.862354] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1d63d810-5e66-402c-9d42-c4dffd33b19e tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Releasing lock "refresh_cache-5c7b2127-e875-4222-8148-a2ea60631c25" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 693.862799] env[69367]: DEBUG nova.compute.manager [None req-1d63d810-5e66-402c-9d42-c4dffd33b19e tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Start destroying the instance on the hypervisor. {{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 693.863093] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-1d63d810-5e66-402c-9d42-c4dffd33b19e tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 693.863311] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ffe41fc6-5e3c-439d-80ea-7f9ac0975be1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.874226] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5149d548-f171-43b2-ac5b-c3ca14f3a01f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.906029] env[69367]: WARNING nova.virt.vmwareapi.vmops [None req-1d63d810-5e66-402c-9d42-c4dffd33b19e tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5c7b2127-e875-4222-8148-a2ea60631c25 could not be found. 
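The inventory payloads being refreshed above use the standard placement record shape: per resource class, total and reserved bound the physical pool, allocation_ratio scales what is schedulable, and min_unit/max_unit/step_size constrain any single allocation. As a rough illustration (a sketch of the arithmetic only, not placement's actual code), the figures reported for provider 19ddf8be-7305-4f70-8366-52a9957232e6 work out as below; note that DISK_GB already carries a max_unit of only 1 here, and it is the later drop of that field to 0 that trips the schema failure shown further down.

    # Illustrative only: recompute the effective capacity implied by the
    # inventory dict printed in the log records above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,
                      'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530,
                      'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 1,
                      'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # Placement treats (total - reserved) * allocation_ratio as the
        # schedulable capacity; max_unit caps any one allocation request.
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(f"{rc}: capacity={capacity}, per-allocation cap={inv['max_unit']}")

    # VCPU: capacity=192, per-allocation cap=16
    # MEMORY_MB: capacity=196078, per-allocation cap=65530
    # DISK_GB: capacity=400, per-allocation cap=1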
[ 693.906263] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-1d63d810-5e66-402c-9d42-c4dffd33b19e tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 693.906446] env[69367]: INFO nova.compute.manager [None req-1d63d810-5e66-402c-9d42-c4dffd33b19e tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Took 0.04 seconds to destroy the instance on the hypervisor. [ 693.906918] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1d63d810-5e66-402c-9d42-c4dffd33b19e tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 693.914018] env[69367]: DEBUG nova.compute.manager [-] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 693.914018] env[69367]: DEBUG nova.network.neutron [-] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 693.960093] env[69367]: DEBUG nova.network.neutron [-] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 694.001535] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0185a06-88b0-4ade-80d1-302948aabdb1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.008857] env[69367]: INFO nova.scheduler.client.report [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Deleted allocations for instance 65fcdf23-421a-45c1-880e-a536ec9fbdfd [ 694.021044] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25f74f70-e267-482d-b8eb-5ead6b55c2bc {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.059482] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e7eeb2-deb7-49a6-9bb4-85358d23e6e9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.068109] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bae929f-6ee4-4665-9009-f0b18f6217c6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.084728] env[69367]: DEBUG nova.compute.provider_tree [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 
1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 694.087812] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Acquiring lock "fa4a5dbc-b885-4439-8520-0bfff38438b3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.088057] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Lock "fa4a5dbc-b885-4439-8520-0bfff38438b3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.088264] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Acquiring lock "fa4a5dbc-b885-4439-8520-0bfff38438b3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.088446] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Lock "fa4a5dbc-b885-4439-8520-0bfff38438b3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.088610] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Lock "fa4a5dbc-b885-4439-8520-0bfff38438b3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.090972] env[69367]: INFO nova.compute.manager [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Terminating instance [ 694.145473] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Releasing lock "refresh_cache-171efb4b-7da6-4db3-88db-c36a9d04f872" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 694.145727] env[69367]: DEBUG nova.compute.manager [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 694.145915] env[69367]: DEBUG nova.compute.manager [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 694.146105] env[69367]: DEBUG nova.network.neutron [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 694.179371] env[69367]: DEBUG nova.network.neutron [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 694.197453] env[69367]: DEBUG nova.network.neutron [req-7a435e35-d78c-44f0-9d5c-eaaffbc218ac req-22f7527a-5e20-479a-ac00-bb74ed0f5199 service nova] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Updated VIF entry in instance network info cache for port 5401116f-daf2-4db0-b052-7bd1adb63cc1. {{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 694.197834] env[69367]: DEBUG nova.network.neutron [req-7a435e35-d78c-44f0-9d5c-eaaffbc218ac req-22f7527a-5e20-479a-ac00-bb74ed0f5199 service nova] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Updating instance_info_cache with network_info: [{"id": "5401116f-daf2-4db0-b052-7bd1adb63cc1", "address": "fa:16:3e:d1:24:7a", "network": {"id": "341b1aa4-f821-4feb-8230-bd176112b3c3", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2139182269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73aa45fc35b44c6a8a2dd8ef127b974", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5401116f-da", "ovs_interfaceid": "5401116f-daf2-4db0-b052-7bd1adb63cc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.463564] env[69367]: DEBUG nova.network.neutron [-] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.525349] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0ff19e7d-e04d-4e0d-8d1c-5d315e749134 tempest-DeleteServersTestJSON-900161964 
tempest-DeleteServersTestJSON-900161964-project-member] Lock "65fcdf23-421a-45c1-880e-a536ec9fbdfd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.683s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.600217] env[69367]: DEBUG nova.compute.manager [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Start destroying the instance on the hypervisor. {{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 694.600217] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 694.600217] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-695353c5-d0a5-4018-b3c5-8abd1921eda9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.613648] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 694.613648] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7b1042e1-27a0-4019-b0dd-a0e9524ace62 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.616576] env[69367]: ERROR nova.scheduler.client.report [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [req-d4bbe561-d8bd-4620-b728-29dd17c15d06] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-d4bbe561-d8bd-4620-b728-29dd17c15d06"}]} [ 694.616987] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.678s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.617604] env[69367]: ERROR nova.compute.manager [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 694.617604] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Traceback (most recent call last): [ 694.617604] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 694.617604] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] yield [ 694.617604] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 694.617604] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] self.set_inventory_for_provider( [ 694.617604] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 694.617604] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 694.617805] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-d4bbe561-d8bd-4620-b728-29dd17c15d06"}]} [ 694.617805] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] [ 694.617805] env[69367]: ERROR nova.compute.manager [instance: 
3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] During handling of the above exception, another exception occurred: [ 694.617805] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] [ 694.617805] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Traceback (most recent call last): [ 694.617805] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 694.617805] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] self._delete_instance(context, instance, bdms) [ 694.617805] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 694.617805] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] self._complete_deletion(context, instance) [ 694.618267] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 694.618267] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] self._update_resource_tracker(context, instance) [ 694.618267] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 694.618267] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] self.rt.update_usage(context, instance, instance.node) [ 694.618267] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 694.618267] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] return f(*args, **kwargs) [ 694.618267] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 694.618267] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] self._update(context.elevated(), self.compute_nodes[nodename]) [ 694.618267] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 694.618267] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] self._update_to_placement(context, compute_node, startup) [ 694.618267] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 694.618267] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 694.618591] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 694.618591] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] return attempt.get(self._wrap_exception) [ 694.618591] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 694.618591] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] six.reraise(self.value[0], self.value[1], self.value[2]) [ 694.618591] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 694.618591] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] raise value [ 694.618591] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 694.618591] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 694.618591] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 694.618591] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] self.reportclient.update_from_provider_tree( [ 694.618591] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 694.618591] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] with catch_all(pd.uuid): [ 694.618591] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 694.618963] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] self.gen.throw(typ, value, traceback) [ 694.618963] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 694.618963] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] raise exception.ResourceProviderSyncFailed() [ 694.618963] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 694.618963] env[69367]: ERROR nova.compute.manager [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] [ 694.620628] env[69367]: DEBUG oslo_concurrency.lockutils [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.244s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.622382] env[69367]: INFO nova.compute.claims [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 694.627637] env[69367]: DEBUG oslo_vmware.api [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Waiting for the task: (returnval){ [ 694.627637] env[69367]: value = "task-4233873" [ 694.627637] env[69367]: _type = "Task" [ 694.627637] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.637586] env[69367]: DEBUG oslo_vmware.api [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Task: {'id': task-4233873, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.684396] env[69367]: DEBUG nova.network.neutron [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.700093] env[69367]: DEBUG oslo_concurrency.lockutils [req-7a435e35-d78c-44f0-9d5c-eaaffbc218ac req-22f7527a-5e20-479a-ac00-bb74ed0f5199 service nova] Releasing lock "refresh_cache-d2f8328d-fd05-4e63-9cbd-a6e3ec948964" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 694.966938] env[69367]: INFO nova.compute.manager [-] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Took 1.06 seconds to deallocate network for instance. 
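The PowerOffVM_Task handling above (task-4233873: "Waiting for the task", then progress polling) is the usual oslo.vmware pattern: invoke the vSphere call that returns a Task managed object, then block on the session's task poller. A minimal sketch under assumed connection parameters (host, user and password are placeholders, not values from this deployment; the UUID is the one from the log):

    from oslo_vmware import api

    # Placeholder credentials -- illustrative only.
    session = api.VMwareAPISession('vcenter.example.com', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Resolve the VM by instance UUID, the same SearchIndex.FindAllByUuid
    # lookup visible in the log records above.
    vm_refs = session.invoke_api(session.vim, 'FindAllByUuid',
                                 session.vim.service_content.searchIndex,
                                 uuid='fa4a5dbc-b885-4439-8520-0bfff38438b3',
                                 vmSearch=True, instanceUuid=True)

    if vm_refs:
        # PowerOffVM_Task returns a Task moref; wait_for_task polls it the
        # way the log shows ("progress is 0%" ... "completed successfully")
        # and raises if the task ends in error.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_refs[0])
        session.wait_for_task(task)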
[ 694.967502] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "837b4093-308b-440b-940d-fc0227a5c590" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.967719] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "837b4093-308b-440b-940d-fc0227a5c590" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.026033] env[69367]: DEBUG nova.compute.manager [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 695.128320] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Lock "3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.025s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.152581] env[69367]: DEBUG oslo_vmware.api [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Task: {'id': task-4233873, 'name': PowerOffVM_Task, 'duration_secs': 0.220294} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.153140] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 695.153905] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 695.154613] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-687684e2-8a2c-4aee-a848-5f724756c5f6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.186628] env[69367]: INFO nova.compute.manager [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 171efb4b-7da6-4db3-88db-c36a9d04f872] Took 1.04 seconds to deallocate network for instance. 
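Every "Acquiring lock ... by ..." / "Lock ... acquired ... waited Ns" / "Lock ... released ... held Ns" triple in this log comes from oslo.concurrency's lockutils (the inner function at lockutils.py:405/410/424); the waited and held figures (12.244s waited on "compute_resources", 19.025s and 33.683s held on the per-instance locks above) are the serialization this produces. A minimal sketch of the same pattern, with illustrative lock names:

    from oslo_concurrency import lockutils

    def refresh_instance_network_cache():
        # Placeholder for the guarded work (e.g. rebuilding instance_info_cache).
        pass

    # Context-manager form, as used for the refresh_cache-<uuid> locks above.
    with lockutils.lock('refresh_cache-5c7b2127-e875-4222-8148-a2ea60631c25'):
        refresh_instance_network_cache()

    # Decorator form, as used for the per-host "compute_resources" lock that
    # serializes ResourceTracker.instance_claim and update_usage.
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        pass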
[ 695.229878] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 695.230077] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 695.230567] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Deleting the datastore file [datastore2] fa4a5dbc-b885-4439-8520-0bfff38438b3 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 695.230605] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d913418-3100-4948-82cc-cb81ca8ed8f2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.244078] env[69367]: DEBUG oslo_vmware.api [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Waiting for the task: (returnval){ [ 695.244078] env[69367]: value = "task-4233875" [ 695.244078] env[69367]: _type = "Task" [ 695.244078] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.254882] env[69367]: DEBUG oslo_vmware.api [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Task: {'id': task-4233875, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.475481] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1d63d810-5e66-402c-9d42-c4dffd33b19e tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.571061] env[69367]: DEBUG oslo_concurrency.lockutils [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.665012] env[69367]: DEBUG nova.scheduler.client.report [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 695.683627] env[69367]: DEBUG nova.scheduler.client.report [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 695.684012] env[69367]: DEBUG nova.compute.provider_tree [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 695.709327] env[69367]: DEBUG nova.scheduler.client.report [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 695.744202] env[69367]: DEBUG nova.scheduler.client.report [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: 
HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 695.763291] env[69367]: DEBUG oslo_vmware.api [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Task: {'id': task-4233875, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.407069} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.763823] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 695.763962] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 695.764208] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 695.764403] env[69367]: INFO nova.compute.manager [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Took 1.17 seconds to destroy the instance on the hypervisor. [ 695.764659] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 695.764861] env[69367]: DEBUG nova.compute.manager [-] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 695.764952] env[69367]: DEBUG nova.network.neutron [-] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 696.203794] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f317c63e-7a21-4dc1-9553-ce85d4f5301d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.216904] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e01694d9-3907-4c9f-b2dd-41a1fc5133e2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.261329] env[69367]: INFO nova.scheduler.client.report [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Deleted allocations for instance 171efb4b-7da6-4db3-88db-c36a9d04f872 [ 696.268203] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1f50e7b-b6c0-4ed6-8d50-b5bf13f2265f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.277637] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-985dcf99-acc6-4c82-9609-978a18937531 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.297413] env[69367]: DEBUG nova.compute.provider_tree [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 696.654807] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.773773] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e9a8e492-a9b6-49b5-a43e-ab2503d52af0 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "171efb4b-7da6-4db3-88db-c36a9d04f872" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.787s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 696.800790] env[69367]: DEBUG nova.scheduler.client.report [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 697.122456] env[69367]: DEBUG nova.network.neutron [-] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.217459] env[69367]: DEBUG nova.compute.manager [req-cc097c23-75e2-40e7-accd-e58331e6ece2 req-e8e41664-69de-40f2-a87a-18c80ef44f69 service nova] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Received event network-vif-deleted-b9b8324a-008d-47c5-a1e4-571d6275a798 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 697.276363] env[69367]: DEBUG nova.compute.manager [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 697.307315] env[69367]: DEBUG oslo_concurrency.lockutils [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.687s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.307948] env[69367]: DEBUG nova.compute.manager [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Start building networks asynchronously for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 697.314192] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.533s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.317705] env[69367]: INFO nova.compute.claims [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 697.390107] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Acquiring lock "a0b99237-8f23-40ec-827f-af75961a096d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.390358] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Lock "a0b99237-8f23-40ec-827f-af75961a096d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.625815] env[69367]: INFO nova.compute.manager [-] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Took 1.86 seconds to deallocate network for instance. [ 697.803430] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.825189] env[69367]: DEBUG nova.compute.utils [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 697.825298] env[69367]: DEBUG nova.compute.manager [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Allocating IP information in the background. 
{{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 697.825451] env[69367]: DEBUG nova.network.neutron [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 697.877672] env[69367]: DEBUG nova.policy [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b93a3158ab7945efa3db4dd17ec7cbf5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '52a42805c17544ae8ad875002f133985', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 698.136381] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.331458] env[69367]: DEBUG nova.compute.manager [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Start building block device mappings for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 698.352174] env[69367]: DEBUG nova.network.neutron [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Successfully created port: 91a80978-9bf2-4ee0-95e7-500f762bec77 {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 698.800585] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e55e3d4-6dc0-49bf-b5d8-426ec6843cfd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.813815] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6373f5b7-cdbc-4f6c-af17-d2dffa670816 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.858566] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-536efbbe-6120-47f7-85bc-5720853188a2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.866139] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-019a0200-b38e-462f-9df1-ab26f1c7fbcd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.882142] env[69367]: DEBUG nova.compute.provider_tree [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 699.361152] env[69367]: DEBUG nova.compute.manager [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Start spawning the instance on the hypervisor. 
{{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 699.384826] env[69367]: DEBUG nova.scheduler.client.report [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 699.407375] env[69367]: DEBUG nova.virt.hardware [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 699.407620] env[69367]: DEBUG nova.virt.hardware [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 699.407784] env[69367]: DEBUG nova.virt.hardware [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 699.407988] env[69367]: DEBUG nova.virt.hardware [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 699.408554] env[69367]: DEBUG nova.virt.hardware [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 699.409657] env[69367]: DEBUG nova.virt.hardware [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 699.409657] env[69367]: DEBUG nova.virt.hardware [None 
req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 699.409657] env[69367]: DEBUG nova.virt.hardware [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 699.409657] env[69367]: DEBUG nova.virt.hardware [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 699.409657] env[69367]: DEBUG nova.virt.hardware [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 699.409931] env[69367]: DEBUG nova.virt.hardware [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 699.413169] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43edc6d4-69da-493a-adbd-2a260fc3c3f7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.422187] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f4da7c-c51c-440b-adf8-d354c3657fb6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.475544] env[69367]: DEBUG oslo_concurrency.lockutils [None req-02fec56c-5d8a-44e3-9ebd-481a7397161f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "37f7f69e-e68a-42cf-8a7c-a3146f664c9a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 699.475544] env[69367]: DEBUG oslo_concurrency.lockutils [None req-02fec56c-5d8a-44e3-9ebd-481a7397161f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "37f7f69e-e68a-42cf-8a7c-a3146f664c9a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 699.891678] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.578s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
699.891953] env[69367]: DEBUG nova.compute.manager [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Start building networks asynchronously for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 699.897154] env[69367]: DEBUG oslo_concurrency.lockutils [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.392s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 699.897154] env[69367]: DEBUG nova.objects.instance [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Lazy-loading 'resources' on Instance uuid ba4d981a-19f7-41ef-b7d1-a3f3830fe725 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 700.407749] env[69367]: DEBUG nova.compute.utils [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 700.409299] env[69367]: DEBUG nova.compute.manager [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Allocating IP information in the background. {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 700.409480] env[69367]: DEBUG nova.network.neutron [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 700.482854] env[69367]: DEBUG nova.network.neutron [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Successfully updated port: 91a80978-9bf2-4ee0-95e7-500f762bec77 {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 700.503261] env[69367]: DEBUG nova.policy [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f3e1b9a3cc41452180c82f054db092a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '24da0b94570b4204903f829e6292cbca', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 700.570204] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] Acquiring lock 
"e6a9b69c-e00d-4260-84c7-2d98ce80ead0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 700.570447] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] Lock "e6a9b69c-e00d-4260-84c7-2d98ce80ead0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 700.724384] env[69367]: DEBUG nova.compute.manager [req-5771e117-d300-41e5-975a-9ca2a52ab601 req-8cf1bc78-fad4-4628-92a4-1624b8d4f29a service nova] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Received event network-vif-plugged-91a80978-9bf2-4ee0-95e7-500f762bec77 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 700.724384] env[69367]: DEBUG oslo_concurrency.lockutils [req-5771e117-d300-41e5-975a-9ca2a52ab601 req-8cf1bc78-fad4-4628-92a4-1624b8d4f29a service nova] Acquiring lock "10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 700.724384] env[69367]: DEBUG oslo_concurrency.lockutils [req-5771e117-d300-41e5-975a-9ca2a52ab601 req-8cf1bc78-fad4-4628-92a4-1624b8d4f29a service nova] Lock "10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 700.724732] env[69367]: DEBUG oslo_concurrency.lockutils [req-5771e117-d300-41e5-975a-9ca2a52ab601 req-8cf1bc78-fad4-4628-92a4-1624b8d4f29a service nova] Lock "10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 700.726721] env[69367]: DEBUG nova.compute.manager [req-5771e117-d300-41e5-975a-9ca2a52ab601 req-8cf1bc78-fad4-4628-92a4-1624b8d4f29a service nova] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] No waiting events found dispatching network-vif-plugged-91a80978-9bf2-4ee0-95e7-500f762bec77 {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 700.726721] env[69367]: WARNING nova.compute.manager [req-5771e117-d300-41e5-975a-9ca2a52ab601 req-8cf1bc78-fad4-4628-92a4-1624b8d4f29a service nova] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Received unexpected event network-vif-plugged-91a80978-9bf2-4ee0-95e7-500f762bec77 for instance with vm_state building and task_state spawning. 
[ 700.865580] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73c91cbc-148d-4925-b825-d21f40dbcf81 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.873993] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3d851dd-b920-417e-be3d-e0989ef0a559 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.907414] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c369767e-bb1d-4608-b98e-bdf4f7b1a63d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.920404] env[69367]: DEBUG nova.compute.manager [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Start building block device mappings for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 700.924982] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab2a20c5-0ff4-495e-8d60-8f1dcc47cf5e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.939980] env[69367]: DEBUG nova.compute.provider_tree [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 700.985274] env[69367]: DEBUG oslo_concurrency.lockutils [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Acquiring lock "refresh_cache-10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.985625] env[69367]: DEBUG oslo_concurrency.lockutils [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Acquired lock "refresh_cache-10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 700.985625] env[69367]: DEBUG nova.network.neutron [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 701.321472] env[69367]: DEBUG nova.network.neutron [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Successfully created port: 975c2dda-8218-4379-9269-8b01752fffff {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 701.446885] env[69367]: DEBUG nova.scheduler.client.report [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Inventory has 
not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 701.663843] env[69367]: DEBUG oslo_concurrency.lockutils [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "10419c72-9876-45d3-a941-46464b47fddc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.665064] env[69367]: DEBUG oslo_concurrency.lockutils [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "10419c72-9876-45d3-a941-46464b47fddc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.708831] env[69367]: DEBUG nova.network.neutron [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 701.940651] env[69367]: DEBUG nova.compute.manager [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Start spawning the instance on the hypervisor. 
{{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 701.954019] env[69367]: DEBUG oslo_concurrency.lockutils [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.057s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.955675] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.609s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.958138] env[69367]: INFO nova.compute.claims [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 701.974993] env[69367]: DEBUG nova.virt.hardware [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 701.975708] env[69367]: DEBUG nova.virt.hardware [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 701.975708] env[69367]: DEBUG nova.virt.hardware [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 701.976089] env[69367]: DEBUG nova.virt.hardware [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 701.976405] env[69367]: DEBUG nova.virt.hardware [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 701.976591] 
env[69367]: DEBUG nova.virt.hardware [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 701.976801] env[69367]: DEBUG nova.virt.hardware [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 701.976957] env[69367]: DEBUG nova.virt.hardware [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 701.977282] env[69367]: DEBUG nova.virt.hardware [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 701.978616] env[69367]: DEBUG nova.virt.hardware [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 701.978616] env[69367]: DEBUG nova.virt.hardware [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 701.978616] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e467ea-0bdb-4142-9b15-a7cc8fa35229 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.988489] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c49def28-0bcf-4ec6-ae59-941de15236e9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.993524] env[69367]: INFO nova.scheduler.client.report [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Deleted allocations for instance ba4d981a-19f7-41ef-b7d1-a3f3830fe725 [ 702.176159] env[69367]: DEBUG nova.network.neutron [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Updating instance_info_cache with network_info: [{"id": "91a80978-9bf2-4ee0-95e7-500f762bec77", "address": "fa:16:3e:b4:e0:e4", "network": {"id": "4a66642c-bfaf-4551-908d-cace045330bb", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1782575934-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52a42805c17544ae8ad875002f133985", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1d25020-c621-4388-ac1d-de55bfefbe50", "external-id": "nsx-vlan-transportzone-573", "segmentation_id": 573, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91a80978-9b", "ovs_interfaceid": "91a80978-9bf2-4ee0-95e7-500f762bec77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.504035] env[69367]: DEBUG oslo_concurrency.lockutils [None req-39e01228-3e3c-4114-afc9-ee22a5731ffc tempest-TenantUsagesTestJSON-783521288 tempest-TenantUsagesTestJSON-783521288-project-member] Lock "ba4d981a-19f7-41ef-b7d1-a3f3830fe725" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.240s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 702.678719] env[69367]: DEBUG oslo_concurrency.lockutils [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Releasing lock "refresh_cache-10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 702.682108] env[69367]: DEBUG nova.compute.manager [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Instance network_info: |[{"id": "91a80978-9bf2-4ee0-95e7-500f762bec77", "address": "fa:16:3e:b4:e0:e4", "network": {"id": "4a66642c-bfaf-4551-908d-cace045330bb", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1782575934-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52a42805c17544ae8ad875002f133985", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1d25020-c621-4388-ac1d-de55bfefbe50", "external-id": "nsx-vlan-transportzone-573", "segmentation_id": 573, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91a80978-9b", "ovs_interfaceid": "91a80978-9bf2-4ee0-95e7-500f762bec77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 702.682273] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 
10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:e0:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e1d25020-c621-4388-ac1d-de55bfefbe50', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '91a80978-9bf2-4ee0-95e7-500f762bec77', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 702.689563] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Creating folder: Project (52a42805c17544ae8ad875002f133985). Parent ref: group-v837645. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 702.690250] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bb9b9e0c-f163-4d46-936c-ad4d6c11384c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.707026] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Created folder: Project (52a42805c17544ae8ad875002f133985) in parent group-v837645. [ 702.707026] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Creating folder: Instances. Parent ref: group-v837700. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 702.707026] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d982a545-3f88-41a0-9c08-3d5c55395a29 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.721022] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Created folder: Instances in parent group-v837700. [ 702.721022] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 702.721022] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 702.721022] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-77f034a5-832e-48c8-9780-5e9c220ac8b5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.747709] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 702.747709] env[69367]: value = "task-4233879" [ 702.747709] env[69367]: _type = "Task" [ 702.747709] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.757075] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233879, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.879524] env[69367]: DEBUG nova.compute.manager [req-c3630e4a-f594-46fa-b46c-07d8d4ecb900 req-b9d2d7fe-b868-4024-b7a1-4427feab1f80 service nova] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Received event network-changed-91a80978-9bf2-4ee0-95e7-500f762bec77 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 702.879798] env[69367]: DEBUG nova.compute.manager [req-c3630e4a-f594-46fa-b46c-07d8d4ecb900 req-b9d2d7fe-b868-4024-b7a1-4427feab1f80 service nova] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Refreshing instance network info cache due to event network-changed-91a80978-9bf2-4ee0-95e7-500f762bec77. {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 702.881949] env[69367]: DEBUG oslo_concurrency.lockutils [req-c3630e4a-f594-46fa-b46c-07d8d4ecb900 req-b9d2d7fe-b868-4024-b7a1-4427feab1f80 service nova] Acquiring lock "refresh_cache-10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.881949] env[69367]: DEBUG oslo_concurrency.lockutils [req-c3630e4a-f594-46fa-b46c-07d8d4ecb900 req-b9d2d7fe-b868-4024-b7a1-4427feab1f80 service nova] Acquired lock "refresh_cache-10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 702.882455] env[69367]: DEBUG nova.network.neutron [req-c3630e4a-f594-46fa-b46c-07d8d4ecb900 req-b9d2d7fe-b868-4024-b7a1-4427feab1f80 service nova] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Refreshing network info cache for port 91a80978-9bf2-4ee0-95e7-500f762bec77 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 703.259596] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233879, 'name': CreateVM_Task, 'duration_secs': 0.375628} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.264900] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 703.264900] env[69367]: DEBUG oslo_concurrency.lockutils [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.264900] env[69367]: DEBUG oslo_concurrency.lockutils [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 703.265231] env[69367]: DEBUG oslo_concurrency.lockutils [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 703.265563] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b997718a-c89f-49c8-9eb3-d099f9fb883d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.274189] env[69367]: DEBUG oslo_vmware.api [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Waiting for the task: (returnval){ [ 703.274189] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]525180b0-d112-b68b-82dd-b7fdf701ca51" [ 703.274189] env[69367]: _type = "Task" [ 703.274189] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.287540] env[69367]: DEBUG oslo_vmware.api [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]525180b0-d112-b68b-82dd-b7fdf701ca51, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.508907] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c4163a-8c29-4c20-b22d-6d8e1c6560b1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.520953] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107b7d27-0eef-413c-8c3b-bab127496a9e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.564636] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b0dea3-8bb5-44b6-b02d-6c2075791f99 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.573050] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54da8acf-16b9-45ee-ba2a-446a563937b2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.591153] env[69367]: DEBUG nova.compute.provider_tree [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 703.789318] env[69367]: DEBUG oslo_vmware.api [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]525180b0-d112-b68b-82dd-b7fdf701ca51, 'name': SearchDatastore_Task, 'duration_secs': 0.018692} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.789708] env[69367]: DEBUG oslo_concurrency.lockutils [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 703.790045] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 703.790394] env[69367]: DEBUG oslo_concurrency.lockutils [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.790593] env[69367]: DEBUG oslo_concurrency.lockutils [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 703.790909] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 703.791269] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cbeccb71-c43d-4f6c-a997-5216b3b212da {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.801391] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 703.801671] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 703.802530] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c42409c3-bebc-4d9a-b20d-9d226a07647f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.809611] env[69367]: DEBUG oslo_vmware.api [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Waiting for the task: (returnval){ [ 703.809611] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52a634fa-e922-6215-4ec2-a0dd6846a190" [ 703.809611] env[69367]: _type = "Task" [ 703.809611] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.819637] env[69367]: DEBUG oslo_vmware.api [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52a634fa-e922-6215-4ec2-a0dd6846a190, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.094170] env[69367]: DEBUG nova.scheduler.client.report [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 704.324924] env[69367]: DEBUG oslo_vmware.api [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52a634fa-e922-6215-4ec2-a0dd6846a190, 'name': SearchDatastore_Task, 'duration_secs': 0.010491} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.325764] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94777524-9cac-406c-a7ba-664fa05492ca {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.332111] env[69367]: DEBUG oslo_vmware.api [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Waiting for the task: (returnval){ [ 704.332111] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52adf1da-576c-a268-7f84-c08d73136eaa" [ 704.332111] env[69367]: _type = "Task" [ 704.332111] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.343196] env[69367]: DEBUG oslo_vmware.api [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52adf1da-576c-a268-7f84-c08d73136eaa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.555238] env[69367]: DEBUG nova.network.neutron [req-c3630e4a-f594-46fa-b46c-07d8d4ecb900 req-b9d2d7fe-b868-4024-b7a1-4427feab1f80 service nova] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Updated VIF entry in instance network info cache for port 91a80978-9bf2-4ee0-95e7-500f762bec77. {{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 704.555587] env[69367]: DEBUG nova.network.neutron [req-c3630e4a-f594-46fa-b46c-07d8d4ecb900 req-b9d2d7fe-b868-4024-b7a1-4427feab1f80 service nova] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Updating instance_info_cache with network_info: [{"id": "91a80978-9bf2-4ee0-95e7-500f762bec77", "address": "fa:16:3e:b4:e0:e4", "network": {"id": "4a66642c-bfaf-4551-908d-cace045330bb", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1782575934-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52a42805c17544ae8ad875002f133985", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1d25020-c621-4388-ac1d-de55bfefbe50", "external-id": "nsx-vlan-transportzone-573", "segmentation_id": 573, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91a80978-9b", "ovs_interfaceid": "91a80978-9bf2-4ee0-95e7-500f762bec77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.601957] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.646s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 704.602530] env[69367]: DEBUG nova.compute.manager [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Start building networks asynchronously for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 704.609085] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.483s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 704.609403] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.004s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 704.609600] env[69367]: INFO nova.compute.manager [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] [instance: 4e346ed1-36e9-421d-975f-e8bb6f05c0a0] Successfully reverted task state from None on failure for instance. [ 704.614117] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.316s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 704.616158] env[69367]: INFO nova.compute.claims [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 704.623433] env[69367]: ERROR oslo_messaging.rpc.server [None req-2da95dcb-8d53-4ea0-a385-7bc1951f8313 tempest-ServersAdmin275Test-256247193 tempest-ServersAdmin275Test-256247193-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 704.623433] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 704.623433] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 704.623433] env[69367]: ERROR oslo_messaging.rpc.server yield [ 704.623433] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 704.623433] env[69367]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 704.623433] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 704.623433] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 704.623433] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-7d30141e-169b-4157-a035-edc102a94e96"}]} [ 704.623433] env[69367]: ERROR oslo_messaging.rpc.server [ 704.623902] env[69367]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 704.623902] env[69367]: ERROR oslo_messaging.rpc.server [ 704.623902] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 704.623902] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 704.623902] env[69367]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 704.623902] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 704.623902] env[69367]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 704.623902] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 704.623902] env[69367]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 704.623902] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 704.623902] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 704.623902] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 704.623902] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 704.623902] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 704.623902] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 704.623902] env[69367]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 704.623902] env[69367]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 704.623902] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 704.625511] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 704.625511] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 704.625511] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 704.625511] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 704.625511] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 704.625511] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 704.625511] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 704.625511] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 704.625511] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 704.625511] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 704.625511] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 704.625511] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 704.625511] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 704.625511] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 704.625511] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 704.625511] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 704.625511] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 704.625511] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 704.626597] env[69367]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 704.626597] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 704.626597] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 704.626597] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 704.626597] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 704.626597] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 704.626597] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 704.626597] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 704.626597] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 
704.626597] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 704.626597] env[69367]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 704.626597] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 704.626597] env[69367]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 704.626597] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 704.626597] env[69367]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 704.626597] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 704.626597] env[69367]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 704.626597] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 704.627428] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 704.627428] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 704.627428] env[69367]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 704.627428] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 704.627428] env[69367]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 704.627428] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 704.627428] env[69367]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 704.627428] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 704.627428] env[69367]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 704.627428] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 704.627428] env[69367]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 704.627428] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 704.627428] env[69367]: ERROR oslo_messaging.rpc.server raise value [ 704.627428] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 704.627428] env[69367]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 704.627428] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 704.627428] env[69367]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 704.628390] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 704.628390] env[69367]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 704.628390] env[69367]: ERROR oslo_messaging.rpc.server File 
"/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 704.628390] env[69367]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 704.628390] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 704.628390] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 704.628390] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 704.628390] env[69367]: ERROR oslo_messaging.rpc.server [ 704.845316] env[69367]: DEBUG oslo_vmware.api [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52adf1da-576c-a268-7f84-c08d73136eaa, 'name': SearchDatastore_Task, 'duration_secs': 0.013128} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.845594] env[69367]: DEBUG oslo_concurrency.lockutils [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 704.845853] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57/10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 704.846134] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cf9287ae-de4e-4932-9747-1465ad4e44f6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.854629] env[69367]: DEBUG oslo_vmware.api [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Waiting for the task: (returnval){ [ 704.854629] env[69367]: value = "task-4233880" [ 704.854629] env[69367]: _type = "Task" [ 704.854629] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.867847] env[69367]: DEBUG oslo_vmware.api [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Task: {'id': task-4233880, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.059042] env[69367]: DEBUG oslo_concurrency.lockutils [req-c3630e4a-f594-46fa-b46c-07d8d4ecb900 req-b9d2d7fe-b868-4024-b7a1-4427feab1f80 service nova] Releasing lock "refresh_cache-10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 705.130236] env[69367]: DEBUG nova.compute.utils [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 705.131794] env[69367]: DEBUG nova.compute.manager [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Allocating IP information in the background. {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 705.132039] env[69367]: DEBUG nova.network.neutron [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 705.203777] env[69367]: DEBUG nova.compute.manager [req-3cba17a7-30ee-4171-9568-12b226b604e7 req-36460497-5276-4737-af2a-a7f491db0f6d service nova] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Received event network-vif-plugged-975c2dda-8218-4379-9269-8b01752fffff {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 705.204060] env[69367]: DEBUG oslo_concurrency.lockutils [req-3cba17a7-30ee-4171-9568-12b226b604e7 req-36460497-5276-4737-af2a-a7f491db0f6d service nova] Acquiring lock "bdc0938b-60ef-463a-b3fd-1754f38a3b79-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 705.204314] env[69367]: DEBUG oslo_concurrency.lockutils [req-3cba17a7-30ee-4171-9568-12b226b604e7 req-36460497-5276-4737-af2a-a7f491db0f6d service nova] Lock "bdc0938b-60ef-463a-b3fd-1754f38a3b79-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 705.204617] env[69367]: DEBUG oslo_concurrency.lockutils [req-3cba17a7-30ee-4171-9568-12b226b604e7 req-36460497-5276-4737-af2a-a7f491db0f6d service nova] Lock "bdc0938b-60ef-463a-b3fd-1754f38a3b79-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 705.207097] env[69367]: DEBUG nova.compute.manager [req-3cba17a7-30ee-4171-9568-12b226b604e7 req-36460497-5276-4737-af2a-a7f491db0f6d service nova] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] No waiting events found dispatching network-vif-plugged-975c2dda-8218-4379-9269-8b01752fffff {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 705.207097] env[69367]: WARNING nova.compute.manager [req-3cba17a7-30ee-4171-9568-12b226b604e7 req-36460497-5276-4737-af2a-a7f491db0f6d 
service nova] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Received unexpected event network-vif-plugged-975c2dda-8218-4379-9269-8b01752fffff for instance with vm_state building and task_state spawning. [ 705.308535] env[69367]: DEBUG nova.network.neutron [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Successfully updated port: 975c2dda-8218-4379-9269-8b01752fffff {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 705.351196] env[69367]: DEBUG nova.policy [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2957aecc3e2f49019e509ae1d92038be', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2375d6603eef45069be4a3541519002a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 705.368398] env[69367]: DEBUG oslo_vmware.api [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Task: {'id': task-4233880, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.638201] env[69367]: DEBUG nova.compute.manager [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Start building block device mappings for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 705.814880] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Acquiring lock "refresh_cache-bdc0938b-60ef-463a-b3fd-1754f38a3b79" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.815074] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Acquired lock "refresh_cache-bdc0938b-60ef-463a-b3fd-1754f38a3b79" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 705.815254] env[69367]: DEBUG nova.network.neutron [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 705.870385] env[69367]: DEBUG oslo_vmware.api [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Task: {'id': task-4233880, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.555368} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.870912] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57/10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 705.870912] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 705.871490] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2b507421-8b9f-4eaf-bc74-d88f4a0e8a41 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.883441] env[69367]: DEBUG oslo_vmware.api [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Waiting for the task: (returnval){ [ 705.883441] env[69367]: value = "task-4233881" [ 705.883441] env[69367]: _type = "Task" [ 705.883441] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.896579] env[69367]: DEBUG oslo_vmware.api [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Task: {'id': task-4233881, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.160774] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19146eb9-71e9-4e09-ba55-a9c3af38a0df {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.172419] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-911e1164-7129-4c6b-bfab-a0c45a558ad3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.208446] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e9e06ed-efae-4581-96b7-28d95042557c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.218458] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-538423fb-c56a-4ee0-9eb9-4ae1151074fb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.237670] env[69367]: DEBUG nova.compute.provider_tree [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 706.395190] env[69367]: DEBUG oslo_vmware.api [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Task: {'id': task-4233881, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073282} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.395443] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 706.396284] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-560679c7-faef-44d6-bec0-888d7e8e31ac {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.403771] env[69367]: DEBUG nova.network.neutron [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 706.425434] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Reconfiguring VM instance instance-00000020 to attach disk [datastore2] 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57/10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 706.425434] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22453b7c-2a09-4c53-bea0-024410ed3c2b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.443886] env[69367]: DEBUG nova.network.neutron [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Successfully created port: df0ab2ea-5a93-4792-b937-b8327787a7b4 {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 706.454292] env[69367]: DEBUG oslo_vmware.api [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Waiting for the task: (returnval){ [ 706.454292] env[69367]: value = "task-4233882" [ 706.454292] env[69367]: _type = "Task" [ 706.454292] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.465200] env[69367]: DEBUG oslo_vmware.api [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Task: {'id': task-4233882, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.649780] env[69367]: DEBUG nova.compute.manager [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Start spawning the instance on the hypervisor. 
{{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 706.679927] env[69367]: DEBUG nova.virt.hardware [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 706.680632] env[69367]: DEBUG nova.virt.hardware [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 706.680632] env[69367]: DEBUG nova.virt.hardware [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 706.680632] env[69367]: DEBUG nova.virt.hardware [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 706.680731] env[69367]: DEBUG nova.virt.hardware [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 706.680845] env[69367]: DEBUG nova.virt.hardware [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 706.681095] env[69367]: DEBUG nova.virt.hardware [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 706.681269] env[69367]: DEBUG nova.virt.hardware [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 706.681466] env[69367]: DEBUG nova.virt.hardware [None 
req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 706.681646] env[69367]: DEBUG nova.virt.hardware [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 706.681828] env[69367]: DEBUG nova.virt.hardware [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 706.682808] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ebb444-ff08-4178-b98a-3dc5807502ed {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.692515] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e2ea23-b1e4-4c6d-9c47-b31ce5b5fa25 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.769784] env[69367]: ERROR nova.scheduler.client.report [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] [req-9b8205f5-6d1e-4757-95c3-5b94f2ee389b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-9b8205f5-6d1e-4757-95c3-5b94f2ee389b"}]} [ 706.770190] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.158s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.770769] env[69367]: ERROR nova.compute.manager [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 706.770769] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] Traceback (most recent call last): [ 706.770769] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 706.770769] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] yield [ 706.770769] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 706.770769] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] self.set_inventory_for_provider( [ 706.770769] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 706.770769] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 706.770995] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-9b8205f5-6d1e-4757-95c3-5b94f2ee389b"}]} [ 706.770995] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] [ 706.770995] env[69367]: ERROR 
nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] During handling of the above exception, another exception occurred: [ 706.770995] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] [ 706.770995] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] Traceback (most recent call last): [ 706.770995] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 706.770995] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] with self.rt.instance_claim(context, instance, node, allocs, [ 706.770995] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 706.770995] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] return f(*args, **kwargs) [ 706.771244] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 706.771244] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] self._update(elevated, cn) [ 706.771244] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 706.771244] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] self._update_to_placement(context, compute_node, startup) [ 706.771244] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 706.771244] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 706.771244] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 706.771244] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] return attempt.get(self._wrap_exception) [ 706.771244] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 706.771244] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] six.reraise(self.value[0], self.value[1], self.value[2]) [ 706.771244] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 706.771244] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] raise value [ 706.771244] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 706.771526] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 706.771526] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] File 
"/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 706.771526] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] self.reportclient.update_from_provider_tree( [ 706.771526] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 706.771526] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] with catch_all(pd.uuid): [ 706.771526] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 706.771526] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] self.gen.throw(typ, value, traceback) [ 706.771526] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 706.771526] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] raise exception.ResourceProviderSyncFailed() [ 706.771526] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 706.771526] env[69367]: ERROR nova.compute.manager [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] [ 706.771778] env[69367]: DEBUG nova.compute.utils [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 706.772970] env[69367]: DEBUG oslo_concurrency.lockutils [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.525s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.777017] env[69367]: INFO nova.compute.claims [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 706.777561] env[69367]: DEBUG nova.compute.manager [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] Build of instance 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 706.778188] env[69367]: DEBUG nova.compute.manager [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 706.778405] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] Acquiring lock "refresh_cache-250a50bf-c4b0-4997-9ce5-6dbeb617e9ed" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.778620] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] Acquired lock "refresh_cache-250a50bf-c4b0-4997-9ce5-6dbeb617e9ed" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.778824] env[69367]: DEBUG nova.network.neutron [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 706.810849] env[69367]: DEBUG nova.network.neutron [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Updating instance_info_cache with network_info: [{"id": "975c2dda-8218-4379-9269-8b01752fffff", "address": "fa:16:3e:ea:af:52", "network": {"id": "bccc5cc6-71cd-4de1-b0c6-1342e5f21bff", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1889011946-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "24da0b94570b4204903f829e6292cbca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7", "external-id": "nsx-vlan-transportzone-694", "segmentation_id": 694, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap975c2dda-82", "ovs_interfaceid": "975c2dda-8218-4379-9269-8b01752fffff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.969601] env[69367]: DEBUG oslo_vmware.api [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Task: {'id': task-4233882, 'name': ReconfigVM_Task, 'duration_secs': 0.313674} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.969601] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Reconfigured VM instance instance-00000020 to attach disk [datastore2] 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57/10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 706.970442] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ee4e131b-fc52-4731-9b05-7ceea269c478 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.981642] env[69367]: DEBUG oslo_vmware.api [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Waiting for the task: (returnval){ [ 706.981642] env[69367]: value = "task-4233883" [ 706.981642] env[69367]: _type = "Task" [ 706.981642] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.993721] env[69367]: DEBUG oslo_vmware.api [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Task: {'id': task-4233883, 'name': Rename_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.315214] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Releasing lock "refresh_cache-bdc0938b-60ef-463a-b3fd-1754f38a3b79" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 707.315214] env[69367]: DEBUG nova.compute.manager [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Instance network_info: |[{"id": "975c2dda-8218-4379-9269-8b01752fffff", "address": "fa:16:3e:ea:af:52", "network": {"id": "bccc5cc6-71cd-4de1-b0c6-1342e5f21bff", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1889011946-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "24da0b94570b4204903f829e6292cbca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7", "external-id": "nsx-vlan-transportzone-694", "segmentation_id": 694, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap975c2dda-82", "ovs_interfaceid": "975c2dda-8218-4379-9269-8b01752fffff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 707.315414] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:af:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '975c2dda-8218-4379-9269-8b01752fffff', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 707.324836] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Creating folder: Project (24da0b94570b4204903f829e6292cbca). Parent ref: group-v837645. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 707.325363] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-86230593-d12c-48da-9d28-c8f5e008ae40 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.340675] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Created folder: Project (24da0b94570b4204903f829e6292cbca) in parent group-v837645. [ 707.341062] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Creating folder: Instances. Parent ref: group-v837703. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 707.342273] env[69367]: DEBUG nova.network.neutron [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 707.344349] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ef46c141-9802-4d5a-a49b-bc9855154c68 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.357148] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Created folder: Instances in parent group-v837703. [ 707.357448] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 707.357669] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 707.357887] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c6a9492d-69e2-4d79-94b3-d5159eb5f207 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.385326] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 707.385326] env[69367]: value = "task-4233886" [ 707.385326] env[69367]: _type = "Task" [ 707.385326] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.392754] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233886, 'name': CreateVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.499977] env[69367]: DEBUG oslo_vmware.api [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Task: {'id': task-4233883, 'name': Rename_Task, 'duration_secs': 0.156131} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.500398] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 707.500398] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-16f851f2-6b37-4e48-b35c-6a213c820274 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.508093] env[69367]: DEBUG oslo_vmware.api [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Waiting for the task: (returnval){ [ 707.508093] env[69367]: value = "task-4233887" [ 707.508093] env[69367]: _type = "Task" [ 707.508093] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.517387] env[69367]: DEBUG oslo_vmware.api [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Task: {'id': task-4233887, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.599749] env[69367]: DEBUG nova.network.neutron [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.814367] env[69367]: DEBUG nova.scheduler.client.report [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 707.846960] env[69367]: DEBUG nova.scheduler.client.report [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 707.846960] env[69367]: DEBUG nova.compute.provider_tree [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 707.848852] env[69367]: DEBUG oslo_concurrency.lockutils [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] Acquiring lock "32ad9bbe-f92c-488d-a98a-d28bbfe8293f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 707.849906] env[69367]: DEBUG oslo_concurrency.lockutils [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] Lock "32ad9bbe-f92c-488d-a98a-d28bbfe8293f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 707.871228] env[69367]: DEBUG nova.scheduler.client.report [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 
tempest-ServerActionsTestOtherA-1360477532-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 707.896609] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233886, 'name': CreateVM_Task, 'duration_secs': 0.409529} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.896899] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 707.898075] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.898075] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.898306] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 707.899127] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49b30494-1eb0-4d08-ba98-bf1bee57ade2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.908287] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Waiting for the task: (returnval){ [ 707.908287] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]525e059d-7903-9316-22f8-c25075c95342" [ 707.908287] env[69367]: _type = "Task" [ 707.908287] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.911966] env[69367]: DEBUG nova.scheduler.client.report [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 707.924740] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]525e059d-7903-9316-22f8-c25075c95342, 'name': SearchDatastore_Task, 'duration_secs': 0.010752} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.924740] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 707.924740] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 707.925010] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.925239] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.925672] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 707.925944] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-40411f79-49ca-44c4-97b7-785ff5f78689 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.941994] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 
tempest-ServerPasswordTestJSON-287328885-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 707.942213] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 707.950527] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf6929a6-6d54-4d41-8bd9-da61fb96849a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.957898] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Waiting for the task: (returnval){ [ 707.957898] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52b05eaf-80da-af90-0e30-de48466abf4c" [ 707.957898] env[69367]: _type = "Task" [ 707.957898] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.970152] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52b05eaf-80da-af90-0e30-de48466abf4c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.978721] env[69367]: DEBUG nova.compute.manager [req-111d6119-5700-4b97-8e99-987d5cdd2277 req-eb4bd957-8c53-4cad-9b06-cd9d828d7d0c service nova] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Received event network-changed-975c2dda-8218-4379-9269-8b01752fffff {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 707.978961] env[69367]: DEBUG nova.compute.manager [req-111d6119-5700-4b97-8e99-987d5cdd2277 req-eb4bd957-8c53-4cad-9b06-cd9d828d7d0c service nova] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Refreshing instance network info cache due to event network-changed-975c2dda-8218-4379-9269-8b01752fffff. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 707.979597] env[69367]: DEBUG oslo_concurrency.lockutils [req-111d6119-5700-4b97-8e99-987d5cdd2277 req-eb4bd957-8c53-4cad-9b06-cd9d828d7d0c service nova] Acquiring lock "refresh_cache-bdc0938b-60ef-463a-b3fd-1754f38a3b79" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.979841] env[69367]: DEBUG oslo_concurrency.lockutils [req-111d6119-5700-4b97-8e99-987d5cdd2277 req-eb4bd957-8c53-4cad-9b06-cd9d828d7d0c service nova] Acquired lock "refresh_cache-bdc0938b-60ef-463a-b3fd-1754f38a3b79" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.980088] env[69367]: DEBUG nova.network.neutron [req-111d6119-5700-4b97-8e99-987d5cdd2277 req-eb4bd957-8c53-4cad-9b06-cd9d828d7d0c service nova] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Refreshing network info cache for port 975c2dda-8218-4379-9269-8b01752fffff {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 708.029029] env[69367]: DEBUG oslo_vmware.api [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Task: {'id': task-4233887, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.101941] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] Releasing lock "refresh_cache-250a50bf-c4b0-4997-9ce5-6dbeb617e9ed" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.102194] env[69367]: DEBUG nova.compute.manager [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 708.102392] env[69367]: DEBUG nova.compute.manager [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 708.102561] env[69367]: DEBUG nova.network.neutron [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 708.274128] env[69367]: DEBUG nova.network.neutron [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 709.122026] env[69367]: DEBUG nova.network.neutron [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Successfully updated port: df0ab2ea-5a93-4792-b937-b8327787a7b4 {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 709.125684] env[69367]: DEBUG nova.network.neutron [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.142024] env[69367]: DEBUG oslo_vmware.api [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Task: {'id': task-4233887, 'name': PowerOnVM_Task, 'duration_secs': 0.52597} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.144777] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 709.145203] env[69367]: INFO nova.compute.manager [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Took 9.78 seconds to spawn the instance on the hypervisor. [ 709.145203] env[69367]: DEBUG nova.compute.manager [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 709.146347] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52b05eaf-80da-af90-0e30-de48466abf4c, 'name': SearchDatastore_Task, 'duration_secs': 0.015596} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.149111] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cbbff27-0899-4fba-8378-6f893941e3dc {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.153112] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3eba87b-4e61-4307-85b4-43e6b894db41 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.159967] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Waiting for the task: (returnval){ [ 709.159967] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]5227e653-4408-d500-fc66-a7412e3f2116" [ 709.159967] env[69367]: _type = "Task" [ 709.159967] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.176406] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5227e653-4408-d500-fc66-a7412e3f2116, 'name': SearchDatastore_Task, 'duration_secs': 0.010475} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.176665] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 709.177039] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] bdc0938b-60ef-463a-b3fd-1754f38a3b79/bdc0938b-60ef-463a-b3fd-1754f38a3b79.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 709.177205] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d12218c1-740a-4244-89d0-db78c6481215 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.184797] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Waiting for the task: (returnval){ [ 709.184797] env[69367]: value = "task-4233888" [ 709.184797] env[69367]: _type = "Task" [ 709.184797] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.193380] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Task: {'id': task-4233888, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.291195] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfa826dc-8692-4716-9799-5100f4538f21 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.300261] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e81b0fd3-0fb8-4ef0-bda7-1c03cf551210 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.340794] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d4ac564-e7a3-4309-bd59-eca99f5aa5bd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.350728] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5e786f0-4a0d-45e7-9c45-a8071c4bf1ad {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.367828] env[69367]: DEBUG nova.compute.provider_tree [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 709.502613] env[69367]: DEBUG nova.network.neutron [req-111d6119-5700-4b97-8e99-987d5cdd2277 req-eb4bd957-8c53-4cad-9b06-cd9d828d7d0c service nova] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Updated VIF entry in instance network info cache for port 975c2dda-8218-4379-9269-8b01752fffff. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 709.502613] env[69367]: DEBUG nova.network.neutron [req-111d6119-5700-4b97-8e99-987d5cdd2277 req-eb4bd957-8c53-4cad-9b06-cd9d828d7d0c service nova] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Updating instance_info_cache with network_info: [{"id": "975c2dda-8218-4379-9269-8b01752fffff", "address": "fa:16:3e:ea:af:52", "network": {"id": "bccc5cc6-71cd-4de1-b0c6-1342e5f21bff", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1889011946-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "24da0b94570b4204903f829e6292cbca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7", "external-id": "nsx-vlan-transportzone-694", "segmentation_id": 694, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap975c2dda-82", "ovs_interfaceid": "975c2dda-8218-4379-9269-8b01752fffff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.628518] env[69367]: INFO nova.compute.manager [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] [instance: 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed] Took 1.53 seconds to deallocate network for instance. [ 709.632521] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquiring lock "refresh_cache-788b843c-1496-4562-a761-44f3e1ce6da2" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.632723] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquired lock "refresh_cache-788b843c-1496-4562-a761-44f3e1ce6da2" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 709.632824] env[69367]: DEBUG nova.network.neutron [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 709.685555] env[69367]: INFO nova.compute.manager [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Took 27.34 seconds to build instance. 
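Note on the inventory pushed to the ProviderTree at [709.367828]: DISK_GB is reported with total 400 but max_unit 0. The log does not show how this vmwareapi driver derives max_unit, so the sketch below is a hypothetical illustration only, assuming max_unit is taken from the largest usable free space on a single datastore and floored to whole GiB; anything under 1 GiB then floors to 0, which is exactly the value the placement service rejects a few entries later.

# Hypothetical illustration only -- the driver's actual max_unit derivation
# is not visible in this log. Flooring a sub-GiB free-space figure to whole
# GiB yields 0, matching the DISK_GB max_unit seen above.
def free_bytes_to_max_unit_gib(free_bytes: int) -> int:
    GiB = 1024 ** 3
    return free_bytes // GiB

print(free_bytes_to_max_unit_gib(500 * 1024 ** 2))   # 500 MiB usable -> 0
print(free_bytes_to_max_unit_gib(120 * 1024 ** 3))   # 120 GiB usable -> 120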
[ 709.700659] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Task: {'id': task-4233888, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.894131] env[69367]: ERROR nova.scheduler.client.report [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] [req-7140bbbd-4485-439e-989e-db34e439e45b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-7140bbbd-4485-439e-989e-db34e439e45b"}]} [ 709.894551] env[69367]: DEBUG oslo_concurrency.lockutils [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.122s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 709.895159] env[69367]: ERROR nova.compute.manager [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 709.895159] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] Traceback (most recent call last): [ 709.895159] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 709.895159] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] yield [ 709.895159] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 709.895159] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] self.set_inventory_for_provider( [ 709.895159] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 709.895159] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 709.895447] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-7140bbbd-4485-439e-989e-db34e439e45b"}]} [ 709.895447] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] [ 709.895447] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] During handling of the above exception, another exception occurred: [ 709.895447] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] [ 709.895447] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] Traceback (most recent call last): [ 709.895447] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 709.895447] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] with self.rt.instance_claim(context, instance, node, allocs, [ 709.895447] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 709.895447] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] return f(*args, **kwargs) [ 709.895714] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 709.895714] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] self._update(elevated, cn) [ 709.895714] env[69367]: ERROR nova.compute.manager [instance: 
26418f26-07ae-45e4-87d6-bdcf99674fb5] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 709.895714] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] self._update_to_placement(context, compute_node, startup) [ 709.895714] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 709.895714] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 709.895714] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 709.895714] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] return attempt.get(self._wrap_exception) [ 709.895714] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 709.895714] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] six.reraise(self.value[0], self.value[1], self.value[2]) [ 709.895714] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 709.895714] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] raise value [ 709.895714] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 709.896116] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 709.896116] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 709.896116] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] self.reportclient.update_from_provider_tree( [ 709.896116] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 709.896116] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] with catch_all(pd.uuid): [ 709.896116] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 709.896116] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] self.gen.throw(typ, value, traceback) [ 709.896116] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 709.896116] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] raise exception.ResourceProviderSyncFailed() [ 709.896116] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
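The 400 response at [709.894131] is a plain JSON-Schema rejection: the PUT to /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories carries DISK_GB max_unit = 0, while the schema fragment quoted in the error requires an integer of at least 1. A minimal, self-contained reproduction of that check, using the jsonschema library and only the fragment quoted in the response (the real placement schema has more properties), looks like this:

# Reproduces the schema check quoted in the 400 response above.
import jsonschema

INVENTORY_SCHEMA_FRAGMENT = {
    "type": "object",
    "properties": {
        "inventories": {
            "type": "object",
            "patternProperties": {
                "^[A-Z0-9_]+$": {
                    "type": "object",
                    "properties": {
                        # The constraint the request violates: max_unit >= 1.
                        "max_unit": {
                            "type": "integer",
                            "maximum": 2147483647,
                            "minimum": 1,
                        },
                    },
                },
            },
        },
    },
}

# The DISK_GB entry exactly as reported by the compute node in the log.
payload = {
    "inventories": {
        "DISK_GB": {
            "total": 400,
            "reserved": 0,
            "min_unit": 1,
            "max_unit": 0,   # 0 < minimum of 1 -> HTTP 400 from placement
            "step_size": 1,
            "allocation_ratio": 1.0,
        },
    },
}

try:
    jsonschema.validate(payload, INVENTORY_SCHEMA_FRAGMENT)
except jsonschema.ValidationError as exc:
    print(exc.message)   # "0 is less than the minimum of 1"

Because the resource tracker wraps _update_to_placement in retrying and report.py re-raises ResourceProviderSyncFailed, the claim for instance 26418f26-07ae-45e4-87d6-bdcf99674fb5 fails and the build is re-scheduled a few entries later; the same rejection recurs at [711.631652] on the update_usage path.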
[ 709.896116] env[69367]: ERROR nova.compute.manager [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] [ 709.896388] env[69367]: DEBUG nova.compute.utils [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 709.897929] env[69367]: DEBUG oslo_concurrency.lockutils [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.610s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.897929] env[69367]: DEBUG nova.objects.instance [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lazy-loading 'resources' on Instance uuid c17525ee-d038-4c81-932b-ed74a6de6cb5 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 709.899596] env[69367]: DEBUG nova.compute.manager [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] Build of instance 26418f26-07ae-45e4-87d6-bdcf99674fb5 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 709.900014] env[69367]: DEBUG nova.compute.manager [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 709.900254] env[69367]: DEBUG oslo_concurrency.lockutils [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] Acquiring lock "refresh_cache-26418f26-07ae-45e4-87d6-bdcf99674fb5" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.900403] env[69367]: DEBUG oslo_concurrency.lockutils [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] Acquired lock "refresh_cache-26418f26-07ae-45e4-87d6-bdcf99674fb5" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 709.904016] env[69367]: DEBUG nova.network.neutron [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 710.009218] env[69367]: DEBUG oslo_concurrency.lockutils [req-111d6119-5700-4b97-8e99-987d5cdd2277 req-eb4bd957-8c53-4cad-9b06-cd9d828d7d0c service nova] Releasing lock 
"refresh_cache-bdc0938b-60ef-463a-b3fd-1754f38a3b79" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 710.174359] env[69367]: DEBUG nova.network.neutron [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 710.188973] env[69367]: DEBUG oslo_concurrency.lockutils [None req-edcd6451-8e2d-46d2-bbd3-5224f740ef2e tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Lock "10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.367s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.200516] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Task: {'id': task-4233888, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.359974] env[69367]: DEBUG nova.network.neutron [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Updating instance_info_cache with network_info: [{"id": "df0ab2ea-5a93-4792-b937-b8327787a7b4", "address": "fa:16:3e:ab:f3:e3", "network": {"id": "dd68ce65-5682-4b4c-913c-cf699d2146be", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-341319856-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2375d6603eef45069be4a3541519002a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2c019b6-3ef3-4c8f-95bd-edede2c554a9", "external-id": "nsx-vlan-transportzone-364", "segmentation_id": 364, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf0ab2ea-5a", "ovs_interfaceid": "df0ab2ea-5a93-4792-b937-b8327787a7b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.383589] env[69367]: DEBUG oslo_concurrency.lockutils [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] Acquiring lock "e4db7bcc-26dd-4f0d-80da-655a58c80783" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 710.383890] env[69367]: DEBUG oslo_concurrency.lockutils [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec 
tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] Lock "e4db7bcc-26dd-4f0d-80da-655a58c80783" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 710.425249] env[69367]: DEBUG nova.scheduler.client.report [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 710.433183] env[69367]: DEBUG nova.network.neutron [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 710.440554] env[69367]: DEBUG nova.scheduler.client.report [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 710.440779] env[69367]: DEBUG nova.compute.provider_tree [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 710.461528] env[69367]: DEBUG nova.scheduler.client.report [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 710.480749] env[69367]: DEBUG nova.scheduler.client.report [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:843}} [ 710.545539] env[69367]: DEBUG nova.network.neutron [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.668400] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] Acquiring lock "40e49f7b-e5f7-4673-a764-d8cec8a3cf18" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 710.669032] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] Lock "40e49f7b-e5f7-4673-a764-d8cec8a3cf18" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 710.673160] env[69367]: INFO nova.scheduler.client.report [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] Deleted allocations for instance 250a50bf-c4b0-4997-9ce5-6dbeb617e9ed [ 710.694754] env[69367]: DEBUG nova.compute.manager [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 710.707959] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Task: {'id': task-4233888, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.769516] env[69367]: DEBUG nova.compute.manager [req-4bb1f973-c7d8-4c31-9f4b-54e4e0b8ce30 req-a529382a-8289-4cba-9d72-6d6315ab5560 service nova] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Received event network-vif-plugged-df0ab2ea-5a93-4792-b937-b8327787a7b4 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 710.769516] env[69367]: DEBUG oslo_concurrency.lockutils [req-4bb1f973-c7d8-4c31-9f4b-54e4e0b8ce30 req-a529382a-8289-4cba-9d72-6d6315ab5560 service nova] Acquiring lock "788b843c-1496-4562-a761-44f3e1ce6da2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 710.769607] env[69367]: DEBUG oslo_concurrency.lockutils [req-4bb1f973-c7d8-4c31-9f4b-54e4e0b8ce30 req-a529382a-8289-4cba-9d72-6d6315ab5560 service nova] Lock "788b843c-1496-4562-a761-44f3e1ce6da2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 710.769715] env[69367]: DEBUG oslo_concurrency.lockutils [req-4bb1f973-c7d8-4c31-9f4b-54e4e0b8ce30 req-a529382a-8289-4cba-9d72-6d6315ab5560 service nova] Lock "788b843c-1496-4562-a761-44f3e1ce6da2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.769885] env[69367]: DEBUG nova.compute.manager [req-4bb1f973-c7d8-4c31-9f4b-54e4e0b8ce30 req-a529382a-8289-4cba-9d72-6d6315ab5560 service nova] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] No waiting events found dispatching network-vif-plugged-df0ab2ea-5a93-4792-b937-b8327787a7b4 {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 710.770133] env[69367]: WARNING nova.compute.manager [req-4bb1f973-c7d8-4c31-9f4b-54e4e0b8ce30 req-a529382a-8289-4cba-9d72-6d6315ab5560 service nova] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Received unexpected event network-vif-plugged-df0ab2ea-5a93-4792-b937-b8327787a7b4 for instance with vm_state building and task_state spawning. [ 710.770343] env[69367]: DEBUG nova.compute.manager [req-4bb1f973-c7d8-4c31-9f4b-54e4e0b8ce30 req-a529382a-8289-4cba-9d72-6d6315ab5560 service nova] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Received event network-changed-df0ab2ea-5a93-4792-b937-b8327787a7b4 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 710.770530] env[69367]: DEBUG nova.compute.manager [req-4bb1f973-c7d8-4c31-9f4b-54e4e0b8ce30 req-a529382a-8289-4cba-9d72-6d6315ab5560 service nova] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Refreshing instance network info cache due to event network-changed-df0ab2ea-5a93-4792-b937-b8327787a7b4. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 710.770704] env[69367]: DEBUG oslo_concurrency.lockutils [req-4bb1f973-c7d8-4c31-9f4b-54e4e0b8ce30 req-a529382a-8289-4cba-9d72-6d6315ab5560 service nova] Acquiring lock "refresh_cache-788b843c-1496-4562-a761-44f3e1ce6da2" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.774754] env[69367]: DEBUG oslo_concurrency.lockutils [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] Acquiring lock "d6c2606d-0c6c-4add-b6f5-8229c21b56be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 710.774754] env[69367]: DEBUG oslo_concurrency.lockutils [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] Lock "d6c2606d-0c6c-4add-b6f5-8229c21b56be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 710.862396] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Releasing lock "refresh_cache-788b843c-1496-4562-a761-44f3e1ce6da2" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 710.862753] env[69367]: DEBUG nova.compute.manager [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Instance network_info: |[{"id": "df0ab2ea-5a93-4792-b937-b8327787a7b4", "address": "fa:16:3e:ab:f3:e3", "network": {"id": "dd68ce65-5682-4b4c-913c-cf699d2146be", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-341319856-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2375d6603eef45069be4a3541519002a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2c019b6-3ef3-4c8f-95bd-edede2c554a9", "external-id": "nsx-vlan-transportzone-364", "segmentation_id": 364, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf0ab2ea-5a", "ovs_interfaceid": "df0ab2ea-5a93-4792-b937-b8327787a7b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 710.863151] env[69367]: DEBUG oslo_concurrency.lockutils [req-4bb1f973-c7d8-4c31-9f4b-54e4e0b8ce30 req-a529382a-8289-4cba-9d72-6d6315ab5560 service nova] Acquired lock "refresh_cache-788b843c-1496-4562-a761-44f3e1ce6da2" {{(pid=69367) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 710.863362] env[69367]: DEBUG nova.network.neutron [req-4bb1f973-c7d8-4c31-9f4b-54e4e0b8ce30 req-a529382a-8289-4cba-9d72-6d6315ab5560 service nova] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Refreshing network info cache for port df0ab2ea-5a93-4792-b937-b8327787a7b4 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 710.864581] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ab:f3:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b2c019b6-3ef3-4c8f-95bd-edede2c554a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'df0ab2ea-5a93-4792-b937-b8327787a7b4', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 710.875495] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 710.879266] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 710.880698] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f33fef49-2c27-4e23-aa7b-b75b431ec8cd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.907838] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 710.907838] env[69367]: value = "task-4233889" [ 710.907838] env[69367]: _type = "Task" [ 710.907838] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.920965] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233889, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.018729] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4329bce0-08b5-4470-a4c1-abff51dcb87b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.030057] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa64d2a4-a4a8-43aa-a0f9-c269f383b01d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.066594] env[69367]: DEBUG oslo_concurrency.lockutils [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] Releasing lock "refresh_cache-26418f26-07ae-45e4-87d6-bdcf99674fb5" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 711.066843] env[69367]: DEBUG nova.compute.manager [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 711.067034] env[69367]: DEBUG nova.compute.manager [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 711.067206] env[69367]: DEBUG nova.network.neutron [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 711.069634] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f990bc1e-139a-4ecb-bb91-435e606c7df7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.077927] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6537d80e-b73b-47c3-8e2e-3a471f303bb0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.096213] env[69367]: DEBUG nova.compute.provider_tree [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 711.098446] env[69367]: DEBUG nova.network.neutron [None req-785c96af-d561-41aa-8b94-aba2c186e78c 
tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 711.184932] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b74dbd34-b054-442c-9c38-e89bb046091c tempest-AttachInterfacesUnderV243Test-977744864 tempest-AttachInterfacesUnderV243Test-977744864-project-member] Lock "250a50bf-c4b0-4997-9ce5-6dbeb617e9ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.397s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 711.204053] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Task: {'id': task-4233888, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.526412} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.204332] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] bdc0938b-60ef-463a-b3fd-1754f38a3b79/bdc0938b-60ef-463a-b3fd-1754f38a3b79.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 711.204535] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 711.205227] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-499888f1-ccc8-4721-973c-c5a885edea0c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.216743] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Waiting for the task: (returnval){ [ 711.216743] env[69367]: value = "task-4233890" [ 711.216743] env[69367]: _type = "Task" [ 711.216743] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.225546] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.231063] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Task: {'id': task-4233890, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.421523] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233889, 'name': CreateVM_Task, 'duration_secs': 0.495568} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.421775] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 711.422785] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.423085] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 711.423557] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 711.423847] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0695957c-bcb9-4aa5-9f60-4141712f5b1f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.429447] env[69367]: DEBUG oslo_vmware.api [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Waiting for the task: (returnval){ [ 711.429447] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52c29600-532d-478d-5b40-14299d058133" [ 711.429447] env[69367]: _type = "Task" [ 711.429447] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.438260] env[69367]: DEBUG oslo_vmware.api [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52c29600-532d-478d-5b40-14299d058133, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.604644] env[69367]: DEBUG nova.network.neutron [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.631652] env[69367]: ERROR nova.scheduler.client.report [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [req-c40d6371-86b9-4fe9-80af-cced8aac3b7b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-c40d6371-86b9-4fe9-80af-cced8aac3b7b"}]} [ 711.632080] env[69367]: DEBUG oslo_concurrency.lockutils [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.735s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 711.632746] env[69367]: ERROR nova.compute.manager [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 711.632746] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Traceback (most recent call last): [ 711.632746] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 711.632746] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] yield [ 711.632746] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 711.632746] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] self.set_inventory_for_provider( [ 711.632746] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 711.632746] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 711.633215] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-c40d6371-86b9-4fe9-80af-cced8aac3b7b"}]} [ 711.633215] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] [ 711.633215] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] During handling of the above exception, another exception occurred: [ 711.633215] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] [ 711.633215] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Traceback (most recent call last): [ 711.633215] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 711.633215] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] self._delete_instance(context, instance, bdms) [ 711.633215] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 711.633215] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] self._complete_deletion(context, instance) [ 711.633524] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 711.633524] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] self._update_resource_tracker(context, instance) [ 711.633524] env[69367]: ERROR nova.compute.manager [instance: 
c17525ee-d038-4c81-932b-ed74a6de6cb5] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 711.633524] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] self.rt.update_usage(context, instance, instance.node) [ 711.633524] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 711.633524] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] return f(*args, **kwargs) [ 711.633524] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 711.633524] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] self._update(context.elevated(), self.compute_nodes[nodename]) [ 711.633524] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 711.633524] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] self._update_to_placement(context, compute_node, startup) [ 711.633524] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 711.633524] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 711.633871] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 711.633871] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] return attempt.get(self._wrap_exception) [ 711.633871] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 711.633871] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] six.reraise(self.value[0], self.value[1], self.value[2]) [ 711.633871] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 711.633871] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] raise value [ 711.633871] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 711.633871] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 711.633871] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 711.633871] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] self.reportclient.update_from_provider_tree( [ 711.633871] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 711.633871] 
env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] with catch_all(pd.uuid): [ 711.633871] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 711.634251] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] self.gen.throw(typ, value, traceback) [ 711.634251] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 711.634251] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] raise exception.ResourceProviderSyncFailed() [ 711.634251] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 711.634251] env[69367]: ERROR nova.compute.manager [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] [ 711.635042] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.378s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.635268] env[69367]: DEBUG nova.objects.instance [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lazy-loading 'resources' on Instance uuid ab365570-ac29-4094-be4c-d49563a465c8 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 711.683084] env[69367]: DEBUG nova.network.neutron [req-4bb1f973-c7d8-4c31-9f4b-54e4e0b8ce30 req-a529382a-8289-4cba-9d72-6d6315ab5560 service nova] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Updated VIF entry in instance network info cache for port df0ab2ea-5a93-4792-b937-b8327787a7b4. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 711.684146] env[69367]: DEBUG nova.network.neutron [req-4bb1f973-c7d8-4c31-9f4b-54e4e0b8ce30 req-a529382a-8289-4cba-9d72-6d6315ab5560 service nova] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Updating instance_info_cache with network_info: [{"id": "df0ab2ea-5a93-4792-b937-b8327787a7b4", "address": "fa:16:3e:ab:f3:e3", "network": {"id": "dd68ce65-5682-4b4c-913c-cf699d2146be", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-341319856-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2375d6603eef45069be4a3541519002a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2c019b6-3ef3-4c8f-95bd-edede2c554a9", "external-id": "nsx-vlan-transportzone-364", "segmentation_id": 364, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf0ab2ea-5a", "ovs_interfaceid": "df0ab2ea-5a93-4792-b937-b8327787a7b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.688318] env[69367]: DEBUG nova.compute.manager [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 711.727061] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Task: {'id': task-4233890, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082816} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.727390] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 711.728220] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6bb2906-c3c2-4727-83ad-15d29dea0c46 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.759043] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] bdc0938b-60ef-463a-b3fd-1754f38a3b79/bdc0938b-60ef-463a-b3fd-1754f38a3b79.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 711.760438] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9244b727-096f-46eb-80e9-456d219f0f3f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.780709] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Waiting for the task: (returnval){ [ 711.780709] env[69367]: value = "task-4233891" [ 711.780709] env[69367]: _type = "Task" [ 711.780709] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.791335] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Task: {'id': task-4233891, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.942246] env[69367]: DEBUG oslo_vmware.api [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52c29600-532d-478d-5b40-14299d058133, 'name': SearchDatastore_Task, 'duration_secs': 0.009405} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.942484] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 711.942818] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 711.943107] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.943297] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 711.943524] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 711.943825] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8ddf7c31-30a8-4493-855b-23981a6be51d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.957380] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 711.957380] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 711.957380] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d48bfce9-f419-435a-a6da-0dc4a244cb6a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.965441] env[69367]: DEBUG oslo_vmware.api [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Waiting for the task: (returnval){ [ 711.965441] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52f15d32-e622-78df-1d67-16f14bb948a0" [ 711.965441] env[69367]: _type = "Task" [ 711.965441] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.976778] env[69367]: DEBUG oslo_vmware.api [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52f15d32-e622-78df-1d67-16f14bb948a0, 'name': SearchDatastore_Task, 'duration_secs': 0.009372} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.977698] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-304fb568-680f-49fc-9706-713e485c46e5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.984058] env[69367]: DEBUG oslo_vmware.api [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Waiting for the task: (returnval){ [ 711.984058] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52efaa62-af6c-b68b-c81a-55bbd5b8a815" [ 711.984058] env[69367]: _type = "Task" [ 711.984058] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.995358] env[69367]: DEBUG oslo_vmware.api [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52efaa62-af6c-b68b-c81a-55bbd5b8a815, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.111035] env[69367]: INFO nova.compute.manager [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] [instance: 26418f26-07ae-45e4-87d6-bdcf99674fb5] Took 1.04 seconds to deallocate network for instance. 
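The repeated 400 responses from placement above are a plain JSON-schema rejection: the compute node is reporting DISK_GB inventory with max_unit = 0, while placement's inventory schema (quoted in the error detail) requires max_unit >= 1. A DISK_GB max_unit of 0 most likely means the driver computed the largest usable disk size as zero, for example because no datastore currently reports usable free space; that reading is an inference from the log, not confirmed by it. The following is a minimal, self-contained sketch of the validation that fails, using the jsonschema package and only the schema fragment shown in the error; it is illustrative, not placement's full inventory schema.

    # Minimal reproduction of the placement-side validation failure seen above.
    # The schema fragment is copied from the error detail in the log.
    import jsonschema

    INVENTORY_SCHEMA = {
        "type": "object",
        "properties": {
            "inventories": {
                "type": "object",
                "patternProperties": {
                    "^[A-Z0-9_]+$": {
                        "type": "object",
                        "properties": {
                            "max_unit": {
                                "type": "integer",
                                "maximum": 2147483647,
                                "minimum": 1,
                            },
                        },
                    },
                },
            },
        },
    }

    payload = {
        "inventories": {
            "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1,
                        "max_unit": 0, "step_size": 1, "allocation_ratio": 1.0},
        },
    }

    try:
        jsonschema.validate(payload, INVENTORY_SCHEMA)
    except jsonschema.ValidationError as exc:
        # Prints: "0 is less than the minimum of 1"
        # (failing on inventories/DISK_GB/max_unit, as in the log).
        print(exc.message)
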
[ 712.141536] env[69367]: DEBUG oslo_concurrency.lockutils [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "c17525ee-d038-4c81-932b-ed74a6de6cb5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.715s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 712.164444] env[69367]: DEBUG nova.scheduler.client.report [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 712.185659] env[69367]: DEBUG oslo_concurrency.lockutils [req-4bb1f973-c7d8-4c31-9f4b-54e4e0b8ce30 req-a529382a-8289-4cba-9d72-6d6315ab5560 service nova] Releasing lock "refresh_cache-788b843c-1496-4562-a761-44f3e1ce6da2" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 712.188300] env[69367]: DEBUG nova.scheduler.client.report [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 712.189125] env[69367]: DEBUG nova.compute.provider_tree [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 712.204584] env[69367]: DEBUG nova.scheduler.client.report [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 712.212869] env[69367]: DEBUG oslo_concurrency.lockutils [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 712.229081] env[69367]: DEBUG 
nova.scheduler.client.report [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 712.296379] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Task: {'id': task-4233891, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.497204] env[69367]: DEBUG oslo_vmware.api [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52efaa62-af6c-b68b-c81a-55bbd5b8a815, 'name': SearchDatastore_Task, 'duration_secs': 0.010129} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.497736] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 712.498175] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 788b843c-1496-4562-a761-44f3e1ce6da2/788b843c-1496-4562-a761-44f3e1ce6da2.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 712.498562] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ced94779-b57d-431e-aa7f-769befc4ce29 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.507333] env[69367]: DEBUG oslo_vmware.api [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Waiting for the task: (returnval){ [ 712.507333] env[69367]: value = "task-4233892" [ 712.507333] env[69367]: _type = "Task" [ 712.507333] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.518936] env[69367]: DEBUG oslo_vmware.api [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233892, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.735581] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e764a5-4fb0-4062-9e29-3a4c3b976c25 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.746275] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ea4d07-9ca3-4468-a657-a523d80feb47 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.787384] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b24d361-9543-4b1c-b52c-4a49bbc7bc83 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.800086] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1547e35e-8144-47be-8c32-148925f54c0d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.804504] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Task: {'id': task-4233891, 'name': ReconfigVM_Task, 'duration_secs': 0.553525} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.804805] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Reconfigured VM instance instance-00000021 to attach disk [datastore2] bdc0938b-60ef-463a-b3fd-1754f38a3b79/bdc0938b-60ef-463a-b3fd-1754f38a3b79.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 712.806066] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7972a13c-9eaa-4329-8bcf-67f8f4d291b6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.817274] env[69367]: DEBUG nova.compute.provider_tree [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 712.820309] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Waiting for the task: (returnval){ [ 712.820309] env[69367]: value = "task-4233893" [ 712.820309] env[69367]: _type = "Task" [ 712.820309] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.830512] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Task: {'id': task-4233893, 'name': Rename_Task} progress is 10%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.025542] env[69367]: DEBUG oslo_vmware.api [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233892, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499343} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.027404] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 788b843c-1496-4562-a761-44f3e1ce6da2/788b843c-1496-4562-a761-44f3e1ce6da2.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 713.027404] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 713.027404] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ca1a0938-7d1c-4e34-90d1-2f5039b35c6f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.038205] env[69367]: DEBUG oslo_vmware.api [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Waiting for the task: (returnval){ [ 713.038205] env[69367]: value = "task-4233894" [ 713.038205] env[69367]: _type = "Task" [ 713.038205] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.047269] env[69367]: DEBUG oslo_vmware.api [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233894, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.155313] env[69367]: INFO nova.scheduler.client.report [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] Deleted allocations for instance 26418f26-07ae-45e4-87d6-bdcf99674fb5 [ 713.340769] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Task: {'id': task-4233893, 'name': Rename_Task, 'duration_secs': 0.293595} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.340769] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 713.340769] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-52d411c3-b2cf-4415-9375-56a3a51ec07f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.346847] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Waiting for the task: (returnval){ [ 713.346847] env[69367]: value = "task-4233895" [ 713.346847] env[69367]: _type = "Task" [ 713.346847] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.351349] env[69367]: ERROR nova.scheduler.client.report [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [req-b74434d3-8f97-4429-96a0-96dc06eb3f1c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-b74434d3-8f97-4429-96a0-96dc06eb3f1c"}]} [ 713.351599] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.717s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 713.352260] env[69367]: ERROR nova.compute.manager [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 713.352260] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] Traceback (most recent call last): [ 713.352260] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 713.352260] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] yield [ 713.352260] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 713.352260] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] self.set_inventory_for_provider( [ 713.352260] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 713.352260] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 713.352561] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-b74434d3-8f97-4429-96a0-96dc06eb3f1c"}]} [ 713.352561] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] [ 713.352561] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] During handling of the above exception, another exception occurred: [ 713.352561] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] [ 713.352561] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] Traceback (most recent call last): [ 713.352561] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 713.352561] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] self._delete_instance(context, instance, bdms) [ 713.352561] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 713.352561] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] self._complete_deletion(context, instance) [ 713.354709] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 713.354709] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] self._update_resource_tracker(context, instance) [ 713.354709] env[69367]: ERROR nova.compute.manager [instance: 
ab365570-ac29-4094-be4c-d49563a465c8] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 713.354709] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] self.rt.update_usage(context, instance, instance.node) [ 713.354709] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 713.354709] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] return f(*args, **kwargs) [ 713.354709] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 713.354709] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] self._update(context.elevated(), self.compute_nodes[nodename]) [ 713.354709] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 713.354709] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] self._update_to_placement(context, compute_node, startup) [ 713.354709] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 713.354709] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 713.358556] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 713.358556] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] return attempt.get(self._wrap_exception) [ 713.358556] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 713.358556] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] six.reraise(self.value[0], self.value[1], self.value[2]) [ 713.358556] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 713.358556] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] raise value [ 713.358556] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 713.358556] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 713.358556] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 713.358556] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] self.reportclient.update_from_provider_tree( [ 713.358556] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 713.358556] 
env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] with catch_all(pd.uuid): [ 713.358556] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 713.358862] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] self.gen.throw(typ, value, traceback) [ 713.358862] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 713.358862] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] raise exception.ResourceProviderSyncFailed() [ 713.358862] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 713.358862] env[69367]: ERROR nova.compute.manager [instance: ab365570-ac29-4094-be4c-d49563a465c8] [ 713.358862] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.263s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 713.358862] env[69367]: INFO nova.compute.claims [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 713.368506] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Task: {'id': task-4233895, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.550974] env[69367]: DEBUG oslo_vmware.api [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233894, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.167948} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.551727] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 713.552867] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-345b3901-5a69-49f3-a740-15423e6ccbbc {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.581357] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Reconfiguring VM instance instance-00000022 to attach disk [datastore2] 788b843c-1496-4562-a761-44f3e1ce6da2/788b843c-1496-4562-a761-44f3e1ce6da2.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 713.581357] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7c4b7d0-cbe0-427c-9025-cdee7dfe06fa {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.603954] env[69367]: DEBUG oslo_vmware.api [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Waiting for the task: (returnval){ [ 713.603954] env[69367]: value = "task-4233896" [ 713.603954] env[69367]: _type = "Task" [ 713.603954] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.612249] env[69367]: DEBUG oslo_vmware.api [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233896, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.652309] env[69367]: DEBUG oslo_concurrency.lockutils [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.667515] env[69367]: DEBUG oslo_concurrency.lockutils [None req-785c96af-d561-41aa-8b94-aba2c186e78c tempest-ServerActionsTestOtherA-1360477532 tempest-ServerActionsTestOtherA-1360477532-project-member] Lock "26418f26-07ae-45e4-87d6-bdcf99674fb5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.481s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 713.857835] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Task: {'id': task-4233895, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.862022] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "ab365570-ac29-4094-be4c-d49563a465c8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.486s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 714.114563] env[69367]: DEBUG oslo_vmware.api [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233896, 'name': ReconfigVM_Task, 'duration_secs': 0.291026} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.114881] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Reconfigured VM instance instance-00000022 to attach disk [datastore2] 788b843c-1496-4562-a761-44f3e1ce6da2/788b843c-1496-4562-a761-44f3e1ce6da2.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 714.115540] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f4ec7c4d-9575-4c90-94f9-66e45ec10f6e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.123400] env[69367]: DEBUG oslo_vmware.api [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Waiting for the task: (returnval){ [ 714.123400] env[69367]: value = "task-4233897" [ 714.123400] env[69367]: _type = "Task" [ 714.123400] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.133121] env[69367]: DEBUG oslo_vmware.api [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233897, 'name': Rename_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.171474] env[69367]: DEBUG nova.compute.manager [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 714.358647] env[69367]: DEBUG oslo_vmware.api [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Task: {'id': task-4233895, 'name': PowerOnVM_Task, 'duration_secs': 0.838776} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.358944] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 714.358944] env[69367]: INFO nova.compute.manager [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Took 12.42 seconds to spawn the instance on the hypervisor. [ 714.359690] env[69367]: DEBUG nova.compute.manager [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 714.359950] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cde85272-f875-44d6-924e-bb53eda0f23c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.485738] env[69367]: DEBUG nova.scheduler.client.report [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 714.505278] env[69367]: DEBUG nova.scheduler.client.report [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 714.505588] env[69367]: DEBUG nova.compute.provider_tree [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 714.518932] env[69367]: DEBUG nova.scheduler.client.report [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] Refreshing aggregate 
associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 714.536147] env[69367]: DEBUG nova.scheduler.client.report [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 714.638924] env[69367]: DEBUG oslo_vmware.api [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233897, 'name': Rename_Task, 'duration_secs': 0.177854} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.639235] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 714.639483] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8b2b8e5-0577-489c-b511-ab4d07f091ab {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.650875] env[69367]: DEBUG oslo_vmware.api [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Waiting for the task: (returnval){ [ 714.650875] env[69367]: value = "task-4233898" [ 714.650875] env[69367]: _type = "Task" [ 714.650875] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.662535] env[69367]: DEBUG oslo_vmware.api [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233898, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.696401] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.872740] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "7f937d89-684b-44f5-9f30-783aeafe99d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.872981] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "7f937d89-684b-44f5-9f30-783aeafe99d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 714.888399] env[69367]: INFO nova.compute.manager [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Took 32.14 seconds to build instance. [ 714.928507] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "ab9d8e3e-65c5-4ac9-920f-3042b8cf2054" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.928835] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "ab9d8e3e-65c5-4ac9-920f-3042b8cf2054" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.045280] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9af8253b-7043-4c77-96ef-19d56193d0e3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.056310] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b77eadad-139f-42ba-8e89-b6dbf97649fa {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.094677] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d37c79-3c09-4d89-a9a5-89f4aa9965b9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.106021] env[69367]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e19e3e8-e7f7-4c61-87ad-5d49c97ad906 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.122021] env[69367]: DEBUG nova.compute.provider_tree [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 715.163933] env[69367]: DEBUG oslo_vmware.api [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233898, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.297032] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Acquiring lock "10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.297294] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Lock "10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.297509] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Acquiring lock "10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.297674] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Lock "10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.297835] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Lock "10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 715.300290] env[69367]: INFO nova.compute.manager [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Terminating instance [ 715.379083] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.396044] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ca0e726a-37fd-405c-8640-e70420ab4daa tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Lock "bdc0938b-60ef-463a-b3fd-1754f38a3b79" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.372s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 715.647650] env[69367]: ERROR nova.scheduler.client.report [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] [req-e5ac9b97-bea5-4b93-8318-1d569f51da3f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-e5ac9b97-bea5-4b93-8318-1d569f51da3f"}]} [ 715.648159] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.293s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 715.648644] env[69367]: ERROR nova.compute.manager [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
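Annotation: the 400 recorded above comes from placement's schema validation of the inventory PUT: DISK_GB is reported with max_unit 0, and the schema fragment quoted in the error detail requires an integer between 1 and 2147483647. Below is a minimal sketch that reproduces the same rejection locally with the jsonschema library; the max_unit constraint is copied from the error detail, while the enclosing schema skeleton is a simplified assumption, not the actual placement schema. The traceback records that follow show the path from instance_claim through update_from_provider_tree to set_inventory_for_provider.

# Minimal sketch: reproduce the placement-side validation failure locally.
# The max_unit constraint ({'type': 'integer', 'maximum': 2147483647,
# 'minimum': 1}) is quoted from the 400 response above; the surrounding
# schema skeleton is a simplified assumption, not the real placement schema.
import jsonschema

INVENTORY_SCHEMA = {
    "type": "object",
    "properties": {
        "inventories": {
            "type": "object",
            "patternProperties": {
                "^[A-Z0-9_]+$": {
                    "type": "object",
                    "properties": {
                        "max_unit": {
                            "type": "integer",
                            "maximum": 2147483647,
                            "minimum": 1,
                        },
                    },
                },
            },
        },
    },
}

# Same DISK_GB record the resource tracker tried to PUT above.
payload = {
    "inventories": {
        "DISK_GB": {
            "total": 400, "reserved": 0, "min_unit": 1,
            "max_unit": 0, "step_size": 1, "allocation_ratio": 1.0,
        },
    },
}

try:
    jsonschema.validate(payload, INVENTORY_SCHEMA)
except jsonschema.ValidationError as exc:
    # Prints: 0 is less than the minimum of 1
    print(exc.message)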
[ 715.648644] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] Traceback (most recent call last): [ 715.648644] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 715.648644] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] yield [ 715.648644] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 715.648644] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] self.set_inventory_for_provider( [ 715.648644] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 715.648644] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 715.648900] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-e5ac9b97-bea5-4b93-8318-1d569f51da3f"}]} [ 715.648900] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] [ 715.648900] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] During handling of the above exception, another exception occurred: [ 715.648900] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] [ 715.648900] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] Traceback (most recent call last): [ 715.648900] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 715.648900] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] with self.rt.instance_claim(context, instance, node, allocs, [ 715.648900] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 715.648900] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] return f(*args, **kwargs) [ 715.649284] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 715.649284] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] self._update(elevated, cn) [ 715.649284] env[69367]: ERROR nova.compute.manager [instance: 
73d75c52-7ac9-4a28-8bfd-855fba7950b6] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 715.649284] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] self._update_to_placement(context, compute_node, startup) [ 715.649284] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 715.649284] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 715.649284] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 715.649284] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] return attempt.get(self._wrap_exception) [ 715.649284] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 715.649284] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] six.reraise(self.value[0], self.value[1], self.value[2]) [ 715.649284] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 715.649284] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] raise value [ 715.649284] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 715.649650] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 715.649650] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 715.649650] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] self.reportclient.update_from_provider_tree( [ 715.649650] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 715.649650] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] with catch_all(pd.uuid): [ 715.649650] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 715.649650] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] self.gen.throw(typ, value, traceback) [ 715.649650] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 715.649650] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] raise exception.ResourceProviderSyncFailed() [ 715.649650] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 715.649650] env[69367]: ERROR nova.compute.manager [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] [ 715.649989] env[69367]: DEBUG nova.compute.utils [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 715.652158] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.843s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.652388] env[69367]: DEBUG nova.objects.instance [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Lazy-loading 'resources' on Instance uuid 92c27615-d377-492f-a9db-ff45b2e71537 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 715.657214] env[69367]: DEBUG nova.compute.manager [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] Build of instance 73d75c52-7ac9-4a28-8bfd-855fba7950b6 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 715.657646] env[69367]: DEBUG nova.compute.manager [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 715.657908] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] Acquiring lock "refresh_cache-73d75c52-7ac9-4a28-8bfd-855fba7950b6" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.659174] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] Acquired lock "refresh_cache-73d75c52-7ac9-4a28-8bfd-855fba7950b6" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 715.659383] env[69367]: DEBUG nova.network.neutron [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 715.671997] env[69367]: DEBUG oslo_vmware.api [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 
tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233898, 'name': PowerOnVM_Task, 'duration_secs': 0.517334} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.672316] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 715.672519] env[69367]: INFO nova.compute.manager [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Took 9.02 seconds to spawn the instance on the hypervisor. [ 715.672697] env[69367]: DEBUG nova.compute.manager [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 715.674349] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d4611e8-f833-4554-808c-1bb4a1974943 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.805429] env[69367]: DEBUG nova.compute.manager [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Start destroying the instance on the hypervisor. {{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 715.805613] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 715.807132] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d54a29-fc51-4193-b81c-cb440679c9a8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.817361] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 715.817664] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-76d3c146-5e52-48b0-a08c-8eab4e63f864 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.826349] env[69367]: DEBUG oslo_vmware.api [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Waiting for the task: (returnval){ [ 715.826349] env[69367]: value = "task-4233899" [ 715.826349] env[69367]: _type = "Task" [ 715.826349] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.840076] env[69367]: DEBUG oslo_vmware.api [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Task: {'id': task-4233899, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.902756] env[69367]: DEBUG nova.compute.manager [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: a0b99237-8f23-40ec-827f-af75961a096d] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 716.201019] env[69367]: DEBUG nova.scheduler.client.report [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 716.203244] env[69367]: DEBUG nova.network.neutron [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 716.207137] env[69367]: INFO nova.compute.manager [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Took 31.89 seconds to build instance. 
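Annotation: the resource tracker keeps recomputing the same compute-side inventory, so each retry of the placement sync re-sends DISK_GB with max_unit 0 and hits the same 400 (it recurs later in this log under req-69f917c4). Below is a hedged sketch of a client-side pre-flight check that would surface such a record before the PUT; validate_inventories is a hypothetical helper for illustration only, not Nova code.

# Hypothetical pre-flight check (not Nova code): flag inventory records that
# would violate the max_unit >= 1 bound quoted in the 400 responses above.
def validate_inventories(inventories: dict) -> list[str]:
    """Return human-readable problems, empty list if the payload looks sane."""
    problems = []
    for rc, inv in inventories.items():
        max_unit = inv.get("max_unit", 0)
        if not 1 <= max_unit <= 2147483647:
            problems.append(
                f"{rc}: max_unit={max_unit} outside [1, 2147483647]")
    return problems


# The inventory the resource tracker keeps resubmitting in this log.
inventories = {
    "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
             "step_size": 1, "allocation_ratio": 4.0},
    "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 0,
                "step_size": 1, "allocation_ratio": 1.0},
}
print(validate_inventories(inventories))
# ['DISK_GB: max_unit=0 outside [1, 2147483647]']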
[ 716.222186] env[69367]: DEBUG nova.scheduler.client.report [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 716.222439] env[69367]: DEBUG nova.compute.provider_tree [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 716.234994] env[69367]: DEBUG nova.scheduler.client.report [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 716.263296] env[69367]: DEBUG nova.scheduler.client.report [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 716.294748] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Acquiring lock "bdc0938b-60ef-463a-b3fd-1754f38a3b79" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 716.295150] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Lock "bdc0938b-60ef-463a-b3fd-1754f38a3b79" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 716.295829] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Acquiring 
lock "bdc0938b-60ef-463a-b3fd-1754f38a3b79-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 716.295829] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Lock "bdc0938b-60ef-463a-b3fd-1754f38a3b79-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 716.296724] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Lock "bdc0938b-60ef-463a-b3fd-1754f38a3b79-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 716.303438] env[69367]: INFO nova.compute.manager [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Terminating instance [ 716.341167] env[69367]: DEBUG oslo_vmware.api [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Task: {'id': task-4233899, 'name': PowerOffVM_Task, 'duration_secs': 0.360247} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.345022] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 716.345432] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 716.346114] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-07a99b2f-312e-4e09-8dfe-9411b55328ae {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.429066] env[69367]: DEBUG nova.network.neutron [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.431511] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 716.440879] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 716.441129] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 716.441359] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Deleting the datastore file [datastore2] 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 716.442291] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-629bec5b-ed0c-4f2b-acca-c2eb1ab382b9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.451710] env[69367]: DEBUG oslo_vmware.api [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Waiting for the task: (returnval){ [ 716.451710] env[69367]: value = "task-4233901" [ 716.451710] env[69367]: _type = "Task" [ 716.451710] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.461050] env[69367]: DEBUG oslo_vmware.api [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Task: {'id': task-4233901, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.711287] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a054e73b-3097-4a8c-88a1-51c19c938e7e tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Lock "788b843c-1496-4562-a761-44f3e1ce6da2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.306s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 716.805275] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83053e27-7f2a-40b8-bf49-1beb915909fd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.813197] env[69367]: DEBUG nova.compute.manager [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Start destroying the instance on the hypervisor. 
{{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 716.813285] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 716.814256] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e615882f-587f-4d59-8cc5-05c539e928b4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.817994] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-260e38b4-93a8-4930-8e13-1ac3cd4e6caf {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.826721] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 716.858658] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d2951d76-3988-4bb5-b04a-1165d93f8d75 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.862860] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b208ef9-b90a-455f-96f6-4018d8b7dfcd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.873121] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce7f8d8f-85c4-42d8-9d57-2a657b89aba6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.882194] env[69367]: DEBUG oslo_vmware.api [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Waiting for the task: (returnval){ [ 716.882194] env[69367]: value = "task-4233902" [ 716.882194] env[69367]: _type = "Task" [ 716.882194] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.895099] env[69367]: DEBUG nova.compute.provider_tree [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 716.904186] env[69367]: DEBUG oslo_vmware.api [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Task: {'id': task-4233902, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.936223] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] Releasing lock "refresh_cache-73d75c52-7ac9-4a28-8bfd-855fba7950b6" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 716.936560] env[69367]: DEBUG nova.compute.manager [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 716.936815] env[69367]: DEBUG nova.compute.manager [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 716.937010] env[69367]: DEBUG nova.network.neutron [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 716.963847] env[69367]: DEBUG oslo_vmware.api [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Task: {'id': task-4233901, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.337095} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.964144] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 716.964346] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 716.964547] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 716.964722] env[69367]: INFO nova.compute.manager [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Took 1.16 seconds to destroy the instance on the hypervisor. [ 716.965415] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 716.965415] env[69367]: DEBUG nova.compute.manager [-] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 716.965415] env[69367]: DEBUG nova.network.neutron [-] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 716.972107] env[69367]: DEBUG nova.network.neutron [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 717.063741] env[69367]: DEBUG nova.compute.manager [req-f7cbb343-799a-4056-aa24-e63c59e7b8b6 req-cf8e1750-a47a-46a4-a2e4-d5749c5a7a6e service nova] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Received event network-changed-df0ab2ea-5a93-4792-b937-b8327787a7b4 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 717.064013] env[69367]: DEBUG nova.compute.manager [req-f7cbb343-799a-4056-aa24-e63c59e7b8b6 req-cf8e1750-a47a-46a4-a2e4-d5749c5a7a6e service nova] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Refreshing instance network info cache due to event network-changed-df0ab2ea-5a93-4792-b937-b8327787a7b4. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 717.064214] env[69367]: DEBUG oslo_concurrency.lockutils [req-f7cbb343-799a-4056-aa24-e63c59e7b8b6 req-cf8e1750-a47a-46a4-a2e4-d5749c5a7a6e service nova] Acquiring lock "refresh_cache-788b843c-1496-4562-a761-44f3e1ce6da2" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.064384] env[69367]: DEBUG oslo_concurrency.lockutils [req-f7cbb343-799a-4056-aa24-e63c59e7b8b6 req-cf8e1750-a47a-46a4-a2e4-d5749c5a7a6e service nova] Acquired lock "refresh_cache-788b843c-1496-4562-a761-44f3e1ce6da2" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 717.064541] env[69367]: DEBUG nova.network.neutron [req-f7cbb343-799a-4056-aa24-e63c59e7b8b6 req-cf8e1750-a47a-46a4-a2e4-d5749c5a7a6e service nova] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Refreshing network info cache for port df0ab2ea-5a93-4792-b937-b8327787a7b4 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 717.213854] env[69367]: DEBUG nova.compute.manager [None req-02fec56c-5d8a-44e3-9ebd-481a7397161f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 37f7f69e-e68a-42cf-8a7c-a3146f664c9a] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 717.397425] env[69367]: DEBUG oslo_vmware.api [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Task: {'id': task-4233902, 'name': PowerOffVM_Task, 'duration_secs': 0.195911} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.400521] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 717.400787] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 717.404172] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3d251ea4-5a39-4d60-9c60-0eba9242ed43 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.427747] env[69367]: ERROR nova.scheduler.client.report [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [req-69f917c4-a53d-4900-ab81-32e496e7ed75] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-69f917c4-a53d-4900-ab81-32e496e7ed75"}]} [ 717.428246] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.776s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 717.428884] env[69367]: ERROR nova.compute.manager [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 717.428884] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Traceback (most recent call last): [ 717.428884] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 717.428884] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] yield [ 717.428884] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 717.428884] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] self.set_inventory_for_provider( [ 717.428884] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 717.428884] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 717.429374] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-69f917c4-a53d-4900-ab81-32e496e7ed75"}]} [ 717.429374] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] [ 717.429374] env[69367]: ERROR nova.compute.manager [instance: 
92c27615-d377-492f-a9db-ff45b2e71537] During handling of the above exception, another exception occurred: [ 717.429374] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] [ 717.429374] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Traceback (most recent call last): [ 717.429374] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 717.429374] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] self._delete_instance(context, instance, bdms) [ 717.429374] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 717.429374] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] self._complete_deletion(context, instance) [ 717.429614] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 717.429614] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] self._update_resource_tracker(context, instance) [ 717.429614] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 717.429614] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] self.rt.update_usage(context, instance, instance.node) [ 717.429614] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 717.429614] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] return f(*args, **kwargs) [ 717.429614] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 717.429614] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] self._update(context.elevated(), self.compute_nodes[nodename]) [ 717.429614] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 717.429614] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] self._update_to_placement(context, compute_node, startup) [ 717.429614] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 717.429614] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 717.429915] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 717.429915] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] return attempt.get(self._wrap_exception) [ 717.429915] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 717.429915] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] six.reraise(self.value[0], self.value[1], self.value[2]) [ 717.429915] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 717.429915] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] raise value [ 717.429915] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 717.429915] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 717.429915] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 717.429915] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] self.reportclient.update_from_provider_tree( [ 717.429915] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 717.429915] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] with catch_all(pd.uuid): [ 717.429915] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 717.430304] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] self.gen.throw(typ, value, traceback) [ 717.430304] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 717.430304] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] raise exception.ResourceProviderSyncFailed() [ 717.430304] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 717.430304] env[69367]: ERROR nova.compute.manager [instance: 92c27615-d377-492f-a9db-ff45b2e71537] [ 717.431508] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.203s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 717.433096] env[69367]: INFO nova.compute.claims [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 717.475258] env[69367]: DEBUG nova.network.neutron [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.476482] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 717.476721] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 717.476965] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Deleting the datastore file [datastore2] bdc0938b-60ef-463a-b3fd-1754f38a3b79 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 717.477733] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd21a870-04f5-403b-8faa-d4f4c8efa023 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.486097] env[69367]: DEBUG oslo_vmware.api [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Waiting for the task: (returnval){ [ 717.486097] env[69367]: value = "task-4233904" [ 717.486097] env[69367]: _type = "Task" [ 717.486097] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.496347] env[69367]: DEBUG oslo_vmware.api [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Task: {'id': task-4233904, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.625739] env[69367]: DEBUG nova.compute.manager [req-3516cab0-b663-425c-9284-df84dabfae5f req-b6532daa-927a-46af-9484-bf107ce85896 service nova] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Received event network-vif-deleted-91a80978-9bf2-4ee0-95e7-500f762bec77 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 717.626050] env[69367]: INFO nova.compute.manager [req-3516cab0-b663-425c-9284-df84dabfae5f req-b6532daa-927a-46af-9484-bf107ce85896 service nova] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Neutron deleted interface 91a80978-9bf2-4ee0-95e7-500f762bec77; detaching it from the instance and deleting it from the info cache [ 717.626262] env[69367]: DEBUG nova.network.neutron [req-3516cab0-b663-425c-9284-df84dabfae5f req-b6532daa-927a-46af-9484-bf107ce85896 service nova] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.719762] env[69367]: DEBUG nova.compute.manager [None req-02fec56c-5d8a-44e3-9ebd-481a7397161f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 37f7f69e-e68a-42cf-8a7c-a3146f664c9a] Instance disappeared before build. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2446}} [ 717.868675] env[69367]: DEBUG oslo_concurrency.lockutils [None req-db64bc43-1101-4927-8092-095f6fb60a66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquiring lock "788b843c-1496-4562-a761-44f3e1ce6da2" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 717.869027] env[69367]: DEBUG oslo_concurrency.lockutils [None req-db64bc43-1101-4927-8092-095f6fb60a66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Lock "788b843c-1496-4562-a761-44f3e1ce6da2" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 717.870323] env[69367]: INFO nova.compute.manager [None req-db64bc43-1101-4927-8092-095f6fb60a66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Rebooting instance [ 717.878230] env[69367]: DEBUG nova.network.neutron [req-f7cbb343-799a-4056-aa24-e63c59e7b8b6 req-cf8e1750-a47a-46a4-a2e4-d5749c5a7a6e service nova] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Updated VIF entry in instance network info cache for port df0ab2ea-5a93-4792-b937-b8327787a7b4. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 717.878230] env[69367]: DEBUG nova.network.neutron [req-f7cbb343-799a-4056-aa24-e63c59e7b8b6 req-cf8e1750-a47a-46a4-a2e4-d5749c5a7a6e service nova] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Updating instance_info_cache with network_info: [{"id": "df0ab2ea-5a93-4792-b937-b8327787a7b4", "address": "fa:16:3e:ab:f3:e3", "network": {"id": "dd68ce65-5682-4b4c-913c-cf699d2146be", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-341319856-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2375d6603eef45069be4a3541519002a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2c019b6-3ef3-4c8f-95bd-edede2c554a9", "external-id": "nsx-vlan-transportzone-364", "segmentation_id": 364, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf0ab2ea-5a", "ovs_interfaceid": "df0ab2ea-5a93-4792-b937-b8327787a7b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.907931] env[69367]: DEBUG nova.network.neutron [-] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.938600] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Lock "92c27615-d377-492f-a9db-ff45b2e71537" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.225s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 717.979077] env[69367]: INFO nova.compute.manager [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] [instance: 73d75c52-7ac9-4a28-8bfd-855fba7950b6] Took 1.04 seconds to deallocate network for instance. [ 718.001151] env[69367]: DEBUG oslo_vmware.api [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Task: {'id': task-4233904, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.347823} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.001597] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 718.002009] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 718.002486] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 718.003237] env[69367]: INFO nova.compute.manager [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Took 1.19 seconds to destroy the instance on the hypervisor. [ 718.003655] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 718.004802] env[69367]: DEBUG nova.compute.manager [-] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 718.004993] env[69367]: DEBUG nova.network.neutron [-] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 718.129127] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7a73903d-8620-45d7-b138-d2ca232d3afa {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.139678] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ed283c-007a-44a2-9ff5-670b369fc940 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.171597] env[69367]: DEBUG nova.compute.manager [req-3516cab0-b663-425c-9284-df84dabfae5f req-b6532daa-927a-46af-9484-bf107ce85896 service nova] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Detach interface failed, port_id=91a80978-9bf2-4ee0-95e7-500f762bec77, reason: Instance 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57 could not be found. 
{{(pid=69367) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11601}} [ 718.237883] env[69367]: DEBUG oslo_concurrency.lockutils [None req-02fec56c-5d8a-44e3-9ebd-481a7397161f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "37f7f69e-e68a-42cf-8a7c-a3146f664c9a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.762s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 718.386916] env[69367]: DEBUG oslo_concurrency.lockutils [req-f7cbb343-799a-4056-aa24-e63c59e7b8b6 req-cf8e1750-a47a-46a4-a2e4-d5749c5a7a6e service nova] Releasing lock "refresh_cache-788b843c-1496-4562-a761-44f3e1ce6da2" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 718.419147] env[69367]: DEBUG oslo_concurrency.lockutils [None req-db64bc43-1101-4927-8092-095f6fb60a66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquiring lock "refresh_cache-788b843c-1496-4562-a761-44f3e1ce6da2" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.419216] env[69367]: DEBUG oslo_concurrency.lockutils [None req-db64bc43-1101-4927-8092-095f6fb60a66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquired lock "refresh_cache-788b843c-1496-4562-a761-44f3e1ce6da2" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 718.419540] env[69367]: DEBUG nova.network.neutron [None req-db64bc43-1101-4927-8092-095f6fb60a66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 718.420803] env[69367]: INFO nova.compute.manager [-] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Took 1.46 seconds to deallocate network for instance. 
[ 718.476285] env[69367]: DEBUG nova.scheduler.client.report [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 718.503173] env[69367]: DEBUG nova.scheduler.client.report [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 718.503619] env[69367]: DEBUG nova.compute.provider_tree [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 718.520592] env[69367]: DEBUG nova.scheduler.client.report [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 718.543342] env[69367]: DEBUG nova.scheduler.client.report [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 718.741171] env[69367]: DEBUG nova.compute.manager [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 718.929939] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.051169] env[69367]: INFO nova.scheduler.client.report [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] Deleted allocations for instance 73d75c52-7ac9-4a28-8bfd-855fba7950b6 [ 719.098149] env[69367]: DEBUG nova.network.neutron [-] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.231800] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e01d761-52d6-433e-ac99-f43c2d0d2e93 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.241038] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6392a4-948e-4789-8e63-da0a70d9bb8e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.280037] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f732e4-252b-4292-9458-3cbfccd5fd22 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.290731] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.295478] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c2db734-9237-4cbd-9a56-0c5d55a090ef {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.315733] env[69367]: DEBUG nova.compute.provider_tree [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 719.453783] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.564298] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bec6056e-20f1-4df8-b299-df21fda76884 tempest-ServersV294TestFqdnHostnames-2008691314 tempest-ServersV294TestFqdnHostnames-2008691314-project-member] Lock "73d75c52-7ac9-4a28-8bfd-855fba7950b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.781s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.601500] env[69367]: INFO nova.compute.manager [-] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Took 1.60 seconds to deallocate network for instance. [ 719.720470] env[69367]: DEBUG nova.compute.manager [req-ad9ff5cb-0ff0-4b15-a22b-1e29c33fa74b req-2c3073c1-7fb2-4756-bee0-03a193750351 service nova] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Received event network-vif-deleted-975c2dda-8218-4379-9269-8b01752fffff {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 719.721420] env[69367]: DEBUG nova.network.neutron [None req-db64bc43-1101-4927-8092-095f6fb60a66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Updating instance_info_cache with network_info: [{"id": "df0ab2ea-5a93-4792-b937-b8327787a7b4", "address": "fa:16:3e:ab:f3:e3", "network": {"id": "dd68ce65-5682-4b4c-913c-cf699d2146be", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-341319856-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2375d6603eef45069be4a3541519002a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2c019b6-3ef3-4c8f-95bd-edede2c554a9", "external-id": "nsx-vlan-transportzone-364", "segmentation_id": 364, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf0ab2ea-5a", "ovs_interfaceid": "df0ab2ea-5a93-4792-b937-b8327787a7b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.845391] env[69367]: ERROR nova.scheduler.client.report [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [req-daf48d09-2ca0-483e-afa1-e915b17c587e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-daf48d09-2ca0-483e-afa1-e915b17c587e"}]} [ 719.846111] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.415s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.846738] env[69367]: ERROR nova.compute.manager [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 719.846738] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] Traceback (most recent call last): [ 719.846738] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 719.846738] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] yield [ 719.846738] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 719.846738] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] self.set_inventory_for_provider( [ 719.846738] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 719.846738] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 719.847023] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-daf48d09-2ca0-483e-afa1-e915b17c587e"}]} [ 719.847023] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] [ 719.847023] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] 
During handling of the above exception, another exception occurred: [ 719.847023] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] [ 719.847023] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] Traceback (most recent call last): [ 719.847023] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 719.847023] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] with self.rt.instance_claim(context, instance, node, allocs, [ 719.847023] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 719.847023] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] return f(*args, **kwargs) [ 719.847325] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 719.847325] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] self._update(elevated, cn) [ 719.847325] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 719.847325] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] self._update_to_placement(context, compute_node, startup) [ 719.847325] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 719.847325] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 719.847325] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 719.847325] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] return attempt.get(self._wrap_exception) [ 719.847325] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 719.847325] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] six.reraise(self.value[0], self.value[1], self.value[2]) [ 719.847325] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 719.847325] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] raise value [ 719.847325] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 719.847673] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 719.847673] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 719.847673] 
env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] self.reportclient.update_from_provider_tree( [ 719.847673] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 719.847673] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] with catch_all(pd.uuid): [ 719.847673] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 719.847673] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] self.gen.throw(typ, value, traceback) [ 719.847673] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 719.847673] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] raise exception.ResourceProviderSyncFailed() [ 719.847673] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 719.847673] env[69367]: ERROR nova.compute.manager [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] [ 719.847993] env[69367]: DEBUG nova.compute.utils [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 719.849733] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 27.691s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.849733] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.849733] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69367) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 719.850144] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.442s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.851693] env[69367]: INFO nova.compute.claims [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 719.855171] env[69367]: DEBUG nova.compute.manager [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] Build of instance 25be9c82-df06-498d-b5e7-c59e0ceed475 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 719.855822] env[69367]: DEBUG nova.compute.manager [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 719.855822] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquiring lock "refresh_cache-25be9c82-df06-498d-b5e7-c59e0ceed475" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.855979] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquired lock "refresh_cache-25be9c82-df06-498d-b5e7-c59e0ceed475" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 719.856477] env[69367]: DEBUG nova.network.neutron [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 719.858855] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b40151b1-d557-4ec4-b125-4e9f8120988e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.875071] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed00c423-fd74-4edc-997d-49506f8b4787 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.893414] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b0c4563-25fe-496f-8988-27cee10440a5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.905930] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e75f71-5e87-415c-996a-d65019df2f8d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.940735] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179986MB free_disk=0GB free_vcpus=48 pci_devices=None {{(pid=69367) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 719.940895] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.067086] env[69367]: DEBUG nova.compute.manager [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 10419c72-9876-45d3-a941-46464b47fddc] Starting 
instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 720.108836] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.223613] env[69367]: DEBUG oslo_concurrency.lockutils [None req-db64bc43-1101-4927-8092-095f6fb60a66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Releasing lock "refresh_cache-788b843c-1496-4562-a761-44f3e1ce6da2" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 720.390056] env[69367]: DEBUG nova.network.neutron [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 720.506152] env[69367]: DEBUG nova.network.neutron [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.592789] env[69367]: DEBUG oslo_concurrency.lockutils [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.731368] env[69367]: DEBUG nova.compute.manager [None req-db64bc43-1101-4927-8092-095f6fb60a66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 720.732406] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c6ccc0-0158-48b0-b343-92d09c46b2d5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.887011] env[69367]: DEBUG nova.scheduler.client.report [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 720.906015] env[69367]: DEBUG nova.scheduler.client.report [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 720.906015] env[69367]: DEBUG nova.compute.provider_tree [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 720.918044] env[69367]: DEBUG nova.scheduler.client.report [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 720.942604] env[69367]: DEBUG nova.scheduler.client.report [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 720.942846] env[69367]: DEBUG nova.compute.provider_tree [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] Updating resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 generation from 60 to 61 during operation: update_traits {{(pid=69367) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 721.008598] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Releasing lock "refresh_cache-25be9c82-df06-498d-b5e7-c59e0ceed475" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 721.008761] env[69367]: DEBUG nova.compute.manager [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 721.008970] env[69367]: DEBUG nova.compute.manager [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 721.009168] env[69367]: DEBUG nova.network.neutron [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 721.045419] env[69367]: DEBUG nova.network.neutron [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 721.388635] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "557dc011-44a1-4240-9596-d055d57e176f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 721.388887] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "557dc011-44a1-4240-9596-d055d57e176f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 721.490041] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b893bd-9a6a-465c-8f87-7a149e9ab07b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.498371] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c9567e6-cc29-44d5-b7c5-39702a86c3db {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.533084] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515d52c0-074e-4fe6-81b5-a0934ea3b80a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.542468] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb79d81-e883-40bc-a4e0-46bfc3d30387 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.557661] env[69367]: DEBUG nova.network.neutron [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.559444] 
env[69367]: DEBUG nova.compute.provider_tree [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 721.752472] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21c38290-0f37-4f43-a524-31e048e6a2c8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.760192] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-db64bc43-1101-4927-8092-095f6fb60a66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Doing hard reboot of VM {{(pid=69367) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 721.760192] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-b2c8c8d4-f50c-4cb5-836d-a2ff836be867 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.775863] env[69367]: DEBUG oslo_vmware.api [None req-db64bc43-1101-4927-8092-095f6fb60a66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Waiting for the task: (returnval){ [ 721.775863] env[69367]: value = "task-4233905" [ 721.775863] env[69367]: _type = "Task" [ 721.775863] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.787068] env[69367]: DEBUG oslo_vmware.api [None req-db64bc43-1101-4927-8092-095f6fb60a66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233905, 'name': ResetVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.066242] env[69367]: INFO nova.compute.manager [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 25be9c82-df06-498d-b5e7-c59e0ceed475] Took 1.06 seconds to deallocate network for instance. [ 722.106459] env[69367]: ERROR nova.scheduler.client.report [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] [req-48809c0d-cefa-4f39-a7b6-a6c34575bc37] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-48809c0d-cefa-4f39-a7b6-a6c34575bc37"}]} [ 722.106459] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.254s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 722.106795] env[69367]: ERROR nova.compute.manager [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 722.106795] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] Traceback (most recent call last): [ 722.106795] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 722.106795] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] yield [ 722.106795] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 722.106795] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] self.set_inventory_for_provider( [ 722.106795] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 722.106795] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 722.106996] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-48809c0d-cefa-4f39-a7b6-a6c34575bc37"}]} [ 722.106996] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] [ 722.106996] 
env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] During handling of the above exception, another exception occurred: [ 722.106996] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] [ 722.106996] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] Traceback (most recent call last): [ 722.106996] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 722.106996] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] with self.rt.instance_claim(context, instance, node, allocs, [ 722.106996] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 722.106996] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] return f(*args, **kwargs) [ 722.107257] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 722.107257] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] self._update(elevated, cn) [ 722.107257] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 722.107257] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] self._update_to_placement(context, compute_node, startup) [ 722.107257] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 722.107257] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 722.107257] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 722.107257] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] return attempt.get(self._wrap_exception) [ 722.107257] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 722.107257] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] six.reraise(self.value[0], self.value[1], self.value[2]) [ 722.107257] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 722.107257] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] raise value [ 722.107257] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 722.107562] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 722.107562] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] File 
"/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 722.107562] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] self.reportclient.update_from_provider_tree( [ 722.107562] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 722.107562] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] with catch_all(pd.uuid): [ 722.107562] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 722.107562] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] self.gen.throw(typ, value, traceback) [ 722.107562] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 722.107562] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] raise exception.ResourceProviderSyncFailed() [ 722.107562] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 722.107562] env[69367]: ERROR nova.compute.manager [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] [ 722.107821] env[69367]: DEBUG nova.compute.utils [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 722.108717] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1d63d810-5e66-402c-9d42-c4dffd33b19e tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.633s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 722.109262] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1d63d810-5e66-402c-9d42-c4dffd33b19e tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 722.112205] env[69367]: DEBUG oslo_concurrency.lockutils [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.541s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 722.116250] env[69367]: INFO nova.compute.claims [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 722.118935] env[69367]: DEBUG nova.compute.manager [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] Build of instance 13c1b1aa-2190-4d28-81ad-697e4c098897 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 722.119552] env[69367]: DEBUG nova.compute.manager [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 722.119908] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] Acquiring lock "refresh_cache-13c1b1aa-2190-4d28-81ad-697e4c098897" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.120177] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] Acquired lock "refresh_cache-13c1b1aa-2190-4d28-81ad-697e4c098897" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 722.120433] env[69367]: DEBUG nova.network.neutron [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 722.158018] env[69367]: INFO nova.scheduler.client.report [None req-1d63d810-5e66-402c-9d42-c4dffd33b19e tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Deleted allocations for instance 5c7b2127-e875-4222-8148-a2ea60631c25 [ 722.158018] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquiring lock "8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.158018] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 722.158018] env[69367]: INFO nova.compute.manager [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Shelving [ 722.288650] env[69367]: DEBUG oslo_vmware.api [None req-db64bc43-1101-4927-8092-095f6fb60a66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233905, 'name': ResetVM_Task, 'duration_secs': 0.113958} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.292127] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-db64bc43-1101-4927-8092-095f6fb60a66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Did hard reboot of VM {{(pid=69367) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 722.292127] env[69367]: DEBUG nova.compute.manager [None req-db64bc43-1101-4927-8092-095f6fb60a66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 722.292127] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc318bb-5b67-4ab2-b8c0-3f1f6aacc666 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.655841] env[69367]: DEBUG nova.network.neutron [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 722.670525] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1d63d810-5e66-402c-9d42-c4dffd33b19e tempest-DeleteServersAdminTestJSON-480487764 tempest-DeleteServersAdminTestJSON-480487764-project-member] Lock "5c7b2127-e875-4222-8148-a2ea60631c25" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.440s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 722.773753] env[69367]: DEBUG nova.network.neutron [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.805210] env[69367]: DEBUG oslo_concurrency.lockutils [None req-db64bc43-1101-4927-8092-095f6fb60a66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Lock "788b843c-1496-4562-a761-44f3e1ce6da2" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.936s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 723.013518] env[69367]: DEBUG nova.compute.manager [req-7b87d977-6fec-4f3b-ad6e-cc926ad04e3a req-a0c98eec-4895-4448-8ea3-5b05d44f3f6c service nova] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Received event network-changed-df0ab2ea-5a93-4792-b937-b8327787a7b4 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 723.014786] env[69367]: DEBUG nova.compute.manager [req-7b87d977-6fec-4f3b-ad6e-cc926ad04e3a req-a0c98eec-4895-4448-8ea3-5b05d44f3f6c service nova] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Refreshing instance network info cache due to event network-changed-df0ab2ea-5a93-4792-b937-b8327787a7b4. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 723.016321] env[69367]: DEBUG oslo_concurrency.lockutils [req-7b87d977-6fec-4f3b-ad6e-cc926ad04e3a req-a0c98eec-4895-4448-8ea3-5b05d44f3f6c service nova] Acquiring lock "refresh_cache-788b843c-1496-4562-a761-44f3e1ce6da2" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.016774] env[69367]: DEBUG oslo_concurrency.lockutils [req-7b87d977-6fec-4f3b-ad6e-cc926ad04e3a req-a0c98eec-4895-4448-8ea3-5b05d44f3f6c service nova] Acquired lock "refresh_cache-788b843c-1496-4562-a761-44f3e1ce6da2" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 723.017035] env[69367]: DEBUG nova.network.neutron [req-7b87d977-6fec-4f3b-ad6e-cc926ad04e3a req-a0c98eec-4895-4448-8ea3-5b05d44f3f6c service nova] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Refreshing network info cache for port df0ab2ea-5a93-4792-b937-b8327787a7b4 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 723.104497] env[69367]: INFO nova.scheduler.client.report [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Deleted allocations for instance 25be9c82-df06-498d-b5e7-c59e0ceed475 [ 723.152176] env[69367]: DEBUG nova.scheduler.client.report [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 723.172054] env[69367]: DEBUG nova.scheduler.client.report [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 723.172307] env[69367]: DEBUG nova.compute.provider_tree [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 723.184365] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Powering off the VM 
{{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 723.185427] env[69367]: DEBUG nova.scheduler.client.report [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 723.187412] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b31f7a71-95d2-4ba7-83c1-a5b719ade799 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.196749] env[69367]: DEBUG oslo_vmware.api [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 723.196749] env[69367]: value = "task-4233906" [ 723.196749] env[69367]: _type = "Task" [ 723.196749] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.211436] env[69367]: DEBUG oslo_vmware.api [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233906, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.214931] env[69367]: DEBUG nova.scheduler.client.report [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 723.277835] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] Releasing lock "refresh_cache-13c1b1aa-2190-4d28-81ad-697e4c098897" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 723.278090] env[69367]: DEBUG nova.compute.manager [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 723.278277] env[69367]: DEBUG nova.compute.manager [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 723.278446] env[69367]: DEBUG nova.network.neutron [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 723.305193] env[69367]: DEBUG nova.network.neutron [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 723.613508] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1f53a5e1-fbe5-4540-af3f-45cfe8ec9f64 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "25be9c82-df06-498d-b5e7-c59e0ceed475" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.262s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 723.698679] env[69367]: DEBUG oslo_concurrency.lockutils [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquiring lock "788b843c-1496-4562-a761-44f3e1ce6da2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 723.698679] env[69367]: DEBUG oslo_concurrency.lockutils [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Lock "788b843c-1496-4562-a761-44f3e1ce6da2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 723.698679] env[69367]: DEBUG oslo_concurrency.lockutils [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquiring lock "788b843c-1496-4562-a761-44f3e1ce6da2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 723.698899] env[69367]: DEBUG oslo_concurrency.lockutils [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Lock "788b843c-1496-4562-a761-44f3e1ce6da2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} 
[ 723.699016] env[69367]: DEBUG oslo_concurrency.lockutils [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Lock "788b843c-1496-4562-a761-44f3e1ce6da2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 723.707358] env[69367]: INFO nova.compute.manager [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Terminating instance [ 723.719136] env[69367]: DEBUG oslo_vmware.api [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233906, 'name': PowerOffVM_Task, 'duration_secs': 0.264435} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.720412] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 723.720412] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a2d320-8cfe-4a15-b0a8-b65b240cd659 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.751216] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3632acd8-59ce-43e0-8fb4-18a824fa0ba5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.783409] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fea06db5-789b-497b-b53c-3b209f185a70 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.792305] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73f8bad2-bb6c-4e5d-b53d-9cd11033ad59 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.825626] env[69367]: DEBUG nova.network.neutron [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.830725] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f394464-f549-4678-a924-fc3b2a25e039 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.840469] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb64125e-117d-4f0a-8ade-3f067874454a {{(pid=69367) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.856320] env[69367]: DEBUG nova.compute.provider_tree [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 723.869593] env[69367]: DEBUG nova.network.neutron [req-7b87d977-6fec-4f3b-ad6e-cc926ad04e3a req-a0c98eec-4895-4448-8ea3-5b05d44f3f6c service nova] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Updated VIF entry in instance network info cache for port df0ab2ea-5a93-4792-b937-b8327787a7b4. {{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 723.870019] env[69367]: DEBUG nova.network.neutron [req-7b87d977-6fec-4f3b-ad6e-cc926ad04e3a req-a0c98eec-4895-4448-8ea3-5b05d44f3f6c service nova] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Updating instance_info_cache with network_info: [{"id": "df0ab2ea-5a93-4792-b937-b8327787a7b4", "address": "fa:16:3e:ab:f3:e3", "network": {"id": "dd68ce65-5682-4b4c-913c-cf699d2146be", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-341319856-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2375d6603eef45069be4a3541519002a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2c019b6-3ef3-4c8f-95bd-edede2c554a9", "external-id": "nsx-vlan-transportzone-364", "segmentation_id": 364, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf0ab2ea-5a", "ovs_interfaceid": "df0ab2ea-5a93-4792-b937-b8327787a7b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.116030] env[69367]: DEBUG nova.compute.manager [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 724.213302] env[69367]: DEBUG nova.compute.manager [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Start destroying the instance on the hypervisor. 
{{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 724.213588] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 724.214640] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccb51a27-425f-4d36-a9b5-7a6a259c9230 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.224472] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 724.224830] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ff3bd53e-1446-4277-a53c-1649b5028baa {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.233630] env[69367]: DEBUG oslo_vmware.api [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Waiting for the task: (returnval){ [ 724.233630] env[69367]: value = "task-4233907" [ 724.233630] env[69367]: _type = "Task" [ 724.233630] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.245784] env[69367]: DEBUG oslo_vmware.api [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233907, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.269337] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Creating Snapshot of the VM instance {{(pid=69367) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 724.269337] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-abb462e7-67c3-4070-8879-27473142c4b7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.275212] env[69367]: DEBUG oslo_vmware.api [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 724.275212] env[69367]: value = "task-4233908" [ 724.275212] env[69367]: _type = "Task" [ 724.275212] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.285647] env[69367]: DEBUG oslo_vmware.api [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233908, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.329134] env[69367]: INFO nova.compute.manager [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] [instance: 13c1b1aa-2190-4d28-81ad-697e4c098897] Took 1.05 seconds to deallocate network for instance. [ 724.373074] env[69367]: DEBUG oslo_concurrency.lockutils [req-7b87d977-6fec-4f3b-ad6e-cc926ad04e3a req-a0c98eec-4895-4448-8ea3-5b05d44f3f6c service nova] Releasing lock "refresh_cache-788b843c-1496-4562-a761-44f3e1ce6da2" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 724.390643] env[69367]: ERROR nova.scheduler.client.report [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] [req-a6dd1097-c637-449d-aaa5-1831d31d29ca] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-a6dd1097-c637-449d-aaa5-1831d31d29ca"}]} [ 724.391054] env[69367]: DEBUG oslo_concurrency.lockutils [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.279s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 724.391792] env[69367]: ERROR nova.compute.manager [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
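The 400 returned by Placement above is a JSON-schema rejection: the inventory reported for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 carries DISK_GB max_unit = 0, while the schema quoted in the error body requires an integer of at least 1. A minimal standalone sketch (not Nova or Placement code) that reproduces the same validation failure with the jsonschema library, using the schema fragment and the DISK_GB values as they appear in the log:

import jsonschema

# Schema fragment as quoted in the 400 response above: max_unit must be an
# integer between 1 and 2147483647.
INVENTORY_SCHEMA = {
    "type": "object",
    "properties": {
        "inventories": {
            "type": "object",
            "patternProperties": {
                "^[A-Z0-9_]+$": {
                    "type": "object",
                    "properties": {
                        "max_unit": {
                            "type": "integer",
                            "minimum": 1,
                            "maximum": 2147483647,
                        },
                    },
                },
            },
        },
    },
}

# Payload mirroring the DISK_GB entry from the failing inventory update
# (max_unit == 0).
payload = {"inventories": {"DISK_GB": {"total": 400, "max_unit": 0}}}

try:
    jsonschema.validate(payload, INVENTORY_SCHEMA)
except jsonschema.ValidationError as exc:
    # Prints "0 is less than the minimum of 1" -- the same detail Placement
    # embeds in its 400 body, which Nova surfaces as
    # ResourceProviderUpdateFailed and, ultimately, ResourceProviderSyncFailed.
    print(exc.message)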
[ 724.391792] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] Traceback (most recent call last): [ 724.391792] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 724.391792] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] yield [ 724.391792] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 724.391792] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] self.set_inventory_for_provider( [ 724.391792] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 724.391792] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 724.392150] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-a6dd1097-c637-449d-aaa5-1831d31d29ca"}]} [ 724.392150] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] [ 724.392150] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] During handling of the above exception, another exception occurred: [ 724.392150] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] [ 724.392150] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] Traceback (most recent call last): [ 724.392150] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 724.392150] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] with self.rt.instance_claim(context, instance, node, allocs, [ 724.392150] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 724.392150] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] return f(*args, **kwargs) [ 724.392468] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 724.392468] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] self._update(elevated, cn) [ 724.392468] env[69367]: ERROR nova.compute.manager [instance: 
d785944c-d65e-4a9e-91e5-0c0911b25227] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 724.392468] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] self._update_to_placement(context, compute_node, startup) [ 724.392468] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 724.392468] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 724.392468] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 724.392468] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] return attempt.get(self._wrap_exception) [ 724.392468] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 724.392468] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] six.reraise(self.value[0], self.value[1], self.value[2]) [ 724.392468] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 724.392468] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] raise value [ 724.392468] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 724.392855] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 724.392855] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 724.392855] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] self.reportclient.update_from_provider_tree( [ 724.392855] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 724.392855] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] with catch_all(pd.uuid): [ 724.392855] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 724.392855] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] self.gen.throw(typ, value, traceback) [ 724.392855] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 724.392855] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] raise exception.ResourceProviderSyncFailed() [ 724.392855] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 724.392855] env[69367]: ERROR nova.compute.manager [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] [ 724.393249] env[69367]: DEBUG nova.compute.utils [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 724.395688] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.741s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 724.396046] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 724.396272] env[69367]: INFO nova.compute.manager [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] [instance: 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9] Successfully reverted task state from None on failure for instance. [ 724.399952] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.597s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 724.401800] env[69367]: INFO nova.compute.claims [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 724.408566] env[69367]: ERROR oslo_messaging.rpc.server [None req-4a4e2df7-def0-4f89-8bf6-a91349a96fef tempest-ServersTestManualDisk-924480253 tempest-ServersTestManualDisk-924480253-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 724.408566] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 724.408566] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 724.408566] env[69367]: ERROR oslo_messaging.rpc.server yield [ 724.408566] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 724.408566] env[69367]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 724.408566] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 724.408566] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 724.408566] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-d4bbe561-d8bd-4620-b728-29dd17c15d06"}]} [ 724.408566] env[69367]: ERROR oslo_messaging.rpc.server [ 724.408938] env[69367]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 724.408938] env[69367]: ERROR oslo_messaging.rpc.server [ 724.408938] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 724.408938] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 724.408938] env[69367]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 724.408938] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 724.408938] env[69367]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 724.408938] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 724.408938] env[69367]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 724.408938] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 724.408938] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 724.408938] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 724.408938] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 724.408938] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 724.408938] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 724.408938] env[69367]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 724.408938] env[69367]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 724.408938] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 724.409544] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 724.409544] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 724.409544] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 724.409544] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 724.409544] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 724.409544] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 724.409544] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 724.409544] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 724.409544] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 724.409544] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 724.409544] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 724.409544] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 724.409544] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 724.409544] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 724.409544] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 724.409544] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 724.409544] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 724.409544] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 724.410524] env[69367]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 724.410524] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 724.410524] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 724.410524] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 724.410524] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 724.410524] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 724.410524] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 724.410524] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 724.410524] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 
724.410524] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 724.410524] env[69367]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 724.410524] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 724.410524] env[69367]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 724.410524] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 724.410524] env[69367]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 724.410524] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 724.410524] env[69367]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 724.410524] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 724.411051] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 724.411051] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 724.411051] env[69367]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 724.411051] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 724.411051] env[69367]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 724.411051] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 724.411051] env[69367]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 724.411051] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 724.411051] env[69367]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 724.411051] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 724.411051] env[69367]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 724.411051] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 724.411051] env[69367]: ERROR oslo_messaging.rpc.server raise value [ 724.411051] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 724.411051] env[69367]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 724.411051] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 724.411051] env[69367]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 724.411488] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 724.411488] env[69367]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 724.411488] env[69367]: ERROR oslo_messaging.rpc.server File 
"/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 724.411488] env[69367]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 724.411488] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 724.411488] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 724.411488] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 724.411488] env[69367]: ERROR oslo_messaging.rpc.server [ 724.411488] env[69367]: DEBUG nova.compute.manager [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] Build of instance d785944c-d65e-4a9e-91e5-0c0911b25227 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 724.411488] env[69367]: DEBUG nova.compute.manager [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 724.411775] env[69367]: DEBUG oslo_concurrency.lockutils [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] Acquiring lock "refresh_cache-d785944c-d65e-4a9e-91e5-0c0911b25227" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.411775] env[69367]: DEBUG oslo_concurrency.lockutils [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] Acquired lock "refresh_cache-d785944c-d65e-4a9e-91e5-0c0911b25227" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 724.411775] env[69367]: DEBUG nova.network.neutron [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 724.641077] env[69367]: DEBUG oslo_concurrency.lockutils [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 724.745287] env[69367]: DEBUG oslo_vmware.api [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233907, 'name': PowerOffVM_Task, 'duration_secs': 0.222554} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.745565] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 724.745737] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 724.745994] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-118a8e52-7142-4b00-9feb-c33d4ce705e2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.787625] env[69367]: DEBUG oslo_vmware.api [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233908, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.814849] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 724.815175] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 724.815543] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Deleting the datastore file [datastore2] 788b843c-1496-4562-a761-44f3e1ce6da2 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 724.815956] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b599ecbf-ee41-4dc9-8e9c-3d54bb9dad16 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.824545] env[69367]: DEBUG oslo_vmware.api [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Waiting for the task: (returnval){ [ 724.824545] env[69367]: value = "task-4233910" [ 724.824545] env[69367]: _type = "Task" [ 724.824545] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.835484] env[69367]: DEBUG oslo_vmware.api [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233910, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.934951] env[69367]: DEBUG nova.network.neutron [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 724.993743] env[69367]: DEBUG nova.network.neutron [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.999563] env[69367]: DEBUG oslo_concurrency.lockutils [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquiring lock "63b3fceb-2a10-4626-a09d-5943535ad98c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 724.999772] env[69367]: DEBUG oslo_concurrency.lockutils [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "63b3fceb-2a10-4626-a09d-5943535ad98c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 725.287052] env[69367]: DEBUG oslo_vmware.api [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233908, 'name': CreateSnapshot_Task, 'duration_secs': 0.804752} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.287343] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Created Snapshot of the VM instance {{(pid=69367) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 725.288135] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebd3916-f9da-40f1-8179-26d679e88142 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.339862] env[69367]: DEBUG oslo_vmware.api [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233910, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165327} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.341306] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 725.341624] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 725.341866] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 725.342145] env[69367]: INFO nova.compute.manager [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Took 1.13 seconds to destroy the instance on the hypervisor. [ 725.342695] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 725.343194] env[69367]: DEBUG nova.compute.manager [-] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 725.343328] env[69367]: DEBUG nova.network.neutron [-] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 725.382048] env[69367]: INFO nova.scheduler.client.report [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] Deleted allocations for instance 13c1b1aa-2190-4d28-81ad-697e4c098897 [ 725.443077] env[69367]: DEBUG nova.scheduler.client.report [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 725.456210] env[69367]: DEBUG nova.scheduler.client.report [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 725.456459] env[69367]: DEBUG nova.compute.provider_tree [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 725.469236] env[69367]: DEBUG nova.scheduler.client.report [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 725.493172] env[69367]: DEBUG nova.scheduler.client.report [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: 
HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 725.506320] env[69367]: DEBUG oslo_concurrency.lockutils [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] Releasing lock "refresh_cache-d785944c-d65e-4a9e-91e5-0c0911b25227" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 725.506320] env[69367]: DEBUG nova.compute.manager [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 725.506320] env[69367]: DEBUG nova.compute.manager [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 725.506320] env[69367]: DEBUG nova.network.neutron [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 725.526468] env[69367]: DEBUG nova.network.neutron [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 725.810337] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Creating linked-clone VM from snapshot {{(pid=69367) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 725.810791] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c07540d9-85ac-4472-ab3e-37af632f19a9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.824049] env[69367]: DEBUG oslo_vmware.api [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 725.824049] env[69367]: value = "task-4233911" [ 725.824049] env[69367]: _type = "Task" [ 725.824049] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.837409] env[69367]: DEBUG oslo_vmware.api [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233911, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.894622] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4d7c067b-2e06-4f0e-b1ed-f10936ffec18 tempest-ServersNegativeTestMultiTenantJSON-1342218623 tempest-ServersNegativeTestMultiTenantJSON-1342218623-project-member] Lock "13c1b1aa-2190-4d28-81ad-697e4c098897" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.007s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 725.930655] env[69367]: DEBUG nova.compute.manager [req-d223d722-d0dd-4b90-8b81-253b049e8d3c req-56c36681-5908-4490-bcad-12d684f9649c service nova] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Received event network-vif-deleted-df0ab2ea-5a93-4792-b937-b8327787a7b4 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 725.930939] env[69367]: INFO nova.compute.manager [req-d223d722-d0dd-4b90-8b81-253b049e8d3c req-56c36681-5908-4490-bcad-12d684f9649c service nova] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Neutron deleted interface df0ab2ea-5a93-4792-b937-b8327787a7b4; detaching it from the instance and deleting it from the info cache [ 725.931168] env[69367]: DEBUG nova.network.neutron [req-d223d722-d0dd-4b90-8b81-253b049e8d3c req-56c36681-5908-4490-bcad-12d684f9649c service nova] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.963929] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Acquiring lock "022ca95b-30cc-41f1-be48-51fdfe1f0b14" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 725.963929] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Lock "022ca95b-30cc-41f1-be48-51fdfe1f0b14" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 725.991210] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb7e546c-6450-445e-bd50-dafa4cf6f193 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.001954] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9cf037d-3469-4001-a4e4-9b223c460669 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.045592] env[69367]: DEBUG nova.network.neutron [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.048599] 
env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-183c54d2-ce60-44fc-9eb6-27fa7946643e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.059474] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee6beb01-6c62-478e-8d0a-54e02e85ff67 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.078279] env[69367]: DEBUG nova.compute.provider_tree [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 726.238530] env[69367]: DEBUG nova.network.neutron [-] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.294257] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Acquiring lock "f3386485-a173-4f5d-8f29-4972df3ae468" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.294479] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Lock "f3386485-a173-4f5d-8f29-4972df3ae468" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.334252] env[69367]: DEBUG oslo_vmware.api [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233911, 'name': CloneVM_Task} progress is 94%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.398083] env[69367]: DEBUG nova.compute.manager [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 726.439591] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d277bd46-72ad-492e-a677-97b79a1b2f38 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.449641] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ed365e-f933-40f1-87a4-fa65a6469c07 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.477155] env[69367]: DEBUG nova.compute.manager [req-d223d722-d0dd-4b90-8b81-253b049e8d3c req-56c36681-5908-4490-bcad-12d684f9649c service nova] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Detach interface failed, port_id=df0ab2ea-5a93-4792-b937-b8327787a7b4, reason: Instance 788b843c-1496-4562-a761-44f3e1ce6da2 could not be found. {{(pid=69367) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11601}} [ 726.553735] env[69367]: INFO nova.compute.manager [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] [instance: d785944c-d65e-4a9e-91e5-0c0911b25227] Took 1.05 seconds to deallocate network for instance. [ 726.589491] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] Acquiring lock "236173c7-9464-44b5-83a5-6ff60eedcc6a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.589491] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] Lock "236173c7-9464-44b5-83a5-6ff60eedcc6a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.603022] env[69367]: ERROR nova.scheduler.client.report [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [req-3984e9ab-8316-4a19-aa47-0bccd53f73f2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-3984e9ab-8316-4a19-aa47-0bccd53f73f2"}]} [ 726.603022] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.203s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.603368] env[69367]: ERROR nova.compute.manager [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 726.603368] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] Traceback (most recent call last): [ 726.603368] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 726.603368] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] yield [ 726.603368] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 726.603368] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] self.set_inventory_for_provider( [ 726.603368] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 726.603368] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 726.603679] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-3984e9ab-8316-4a19-aa47-0bccd53f73f2"}]} [ 726.603679] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] [ 726.603679] env[69367]: ERROR nova.compute.manager [instance: 
de68478e-475a-45ef-9eed-44904fcfc1fd] During handling of the above exception, another exception occurred: [ 726.603679] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] [ 726.603679] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] Traceback (most recent call last): [ 726.603679] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 726.603679] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] with self.rt.instance_claim(context, instance, node, allocs, [ 726.603679] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 726.603679] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] return f(*args, **kwargs) [ 726.604009] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 726.604009] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] self._update(elevated, cn) [ 726.604009] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 726.604009] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] self._update_to_placement(context, compute_node, startup) [ 726.604009] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 726.604009] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 726.604009] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 726.604009] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] return attempt.get(self._wrap_exception) [ 726.604009] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 726.604009] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] six.reraise(self.value[0], self.value[1], self.value[2]) [ 726.604009] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 726.604009] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] raise value [ 726.604009] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 726.604372] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 726.604372] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in 
_update_to_placement [ 726.604372] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] self.reportclient.update_from_provider_tree( [ 726.604372] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 726.604372] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] with catch_all(pd.uuid): [ 726.604372] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 726.604372] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] self.gen.throw(typ, value, traceback) [ 726.604372] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 726.604372] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] raise exception.ResourceProviderSyncFailed() [ 726.604372] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 726.604372] env[69367]: ERROR nova.compute.manager [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] [ 726.604690] env[69367]: DEBUG nova.compute.utils [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 726.605450] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.469s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.605672] env[69367]: DEBUG nova.objects.instance [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Lazy-loading 'resources' on Instance uuid fa4a5dbc-b885-4439-8520-0bfff38438b3 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 726.607481] env[69367]: DEBUG nova.compute.manager [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] Build of instance de68478e-475a-45ef-9eed-44904fcfc1fd was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 726.607900] env[69367]: DEBUG nova.compute.manager [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 726.608140] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Acquiring lock "refresh_cache-de68478e-475a-45ef-9eed-44904fcfc1fd" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.608332] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Acquired lock "refresh_cache-de68478e-475a-45ef-9eed-44904fcfc1fd" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 726.608504] env[69367]: DEBUG nova.network.neutron [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 726.742090] env[69367]: INFO nova.compute.manager [-] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Took 1.40 seconds to deallocate network for instance. [ 726.837858] env[69367]: DEBUG oslo_vmware.api [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233911, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.889440] env[69367]: DEBUG oslo_concurrency.lockutils [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Acquiring lock "8ee84a56-cc49-4056-b561-aa1f2b10a06c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.889440] env[69367]: DEBUG oslo_concurrency.lockutils [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Lock "8ee84a56-cc49-4056-b561-aa1f2b10a06c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.934022] env[69367]: DEBUG oslo_concurrency.lockutils [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.131524] env[69367]: DEBUG nova.scheduler.client.report [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 727.146385] env[69367]: DEBUG nova.network.neutron [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 727.149227] env[69367]: DEBUG nova.scheduler.client.report [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 727.149496] env[69367]: DEBUG nova.compute.provider_tree [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 727.163591] env[69367]: DEBUG nova.scheduler.client.report [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 727.184150] env[69367]: DEBUG nova.scheduler.client.report [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 727.248719] env[69367]: DEBUG oslo_concurrency.lockutils [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.320361] env[69367]: DEBUG nova.network.neutron [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.341442] env[69367]: DEBUG oslo_vmware.api [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233911, 'name': CloneVM_Task, 
'duration_secs': 1.470302} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.341712] env[69367]: INFO nova.virt.vmwareapi.vmops [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Created linked-clone VM from snapshot [ 727.342558] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e979b13-9574-4b5f-9951-2f2f98cfb926 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.354368] env[69367]: DEBUG nova.virt.vmwareapi.images [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Uploading image 38eb2f32-4943-4313-9612-8fd99366b9f1 {{(pid=69367) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 727.387183] env[69367]: DEBUG oslo_vmware.rw_handles [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 727.387183] env[69367]: value = "vm-837708" [ 727.387183] env[69367]: _type = "VirtualMachine" [ 727.387183] env[69367]: }. {{(pid=69367) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 727.387468] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-0632e60e-ab61-474c-b370-e9ded097d2e7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.402590] env[69367]: DEBUG oslo_vmware.rw_handles [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lease: (returnval){ [ 727.402590] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52d4e5b4-9e47-025d-b422-07920f848f02" [ 727.402590] env[69367]: _type = "HttpNfcLease" [ 727.402590] env[69367]: } obtained for exporting VM: (result){ [ 727.402590] env[69367]: value = "vm-837708" [ 727.402590] env[69367]: _type = "VirtualMachine" [ 727.402590] env[69367]: }. {{(pid=69367) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 727.402804] env[69367]: DEBUG oslo_vmware.api [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the lease: (returnval){ [ 727.402804] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52d4e5b4-9e47-025d-b422-07920f848f02" [ 727.402804] env[69367]: _type = "HttpNfcLease" [ 727.402804] env[69367]: } to be ready. {{(pid=69367) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 727.411927] env[69367]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 727.411927] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52d4e5b4-9e47-025d-b422-07920f848f02" [ 727.411927] env[69367]: _type = "HttpNfcLease" [ 727.411927] env[69367]: } is initializing. 
{{(pid=69367) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 727.595475] env[69367]: INFO nova.scheduler.client.report [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] Deleted allocations for instance d785944c-d65e-4a9e-91e5-0c0911b25227 [ 727.734792] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b48249b4-3b7b-4aef-98f4-5ed24c794eff {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.743229] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd049b5e-ec2b-4528-ae09-c33a89890da7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.779214] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f53170-bbb2-442b-9a62-ddde8ccd3e04 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.788085] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b7791c-efb6-4224-af4a-96050274b8a3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.805736] env[69367]: DEBUG nova.compute.provider_tree [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 727.824228] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Releasing lock "refresh_cache-de68478e-475a-45ef-9eed-44904fcfc1fd" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 727.824559] env[69367]: DEBUG nova.compute.manager [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 727.824824] env[69367]: DEBUG nova.compute.manager [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 727.826032] env[69367]: DEBUG nova.network.neutron [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 727.850658] env[69367]: DEBUG nova.network.neutron [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 727.912881] env[69367]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 727.912881] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52d4e5b4-9e47-025d-b422-07920f848f02" [ 727.912881] env[69367]: _type = "HttpNfcLease" [ 727.912881] env[69367]: } is ready. {{(pid=69367) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 727.912881] env[69367]: DEBUG oslo_vmware.rw_handles [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 727.912881] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52d4e5b4-9e47-025d-b422-07920f848f02" [ 727.912881] env[69367]: _type = "HttpNfcLease" [ 727.912881] env[69367]: }. {{(pid=69367) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 727.913294] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a359f36a-cbe0-4dcb-85a9-c4507558f43b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.922279] env[69367]: DEBUG oslo_vmware.rw_handles [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b7c0c8-67fa-649c-5553-bc3fcfa3f840/disk-0.vmdk from lease info. {{(pid=69367) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 727.922467] env[69367]: DEBUG oslo_vmware.rw_handles [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b7c0c8-67fa-649c-5553-bc3fcfa3f840/disk-0.vmdk for reading. 
{{(pid=69367) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 728.017059] env[69367]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-200c49e8-a077-4c8a-a999-ff0a01176072 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.108794] env[69367]: DEBUG oslo_concurrency.lockutils [None req-21dbec28-a61b-45fb-99b0-7b920d8d1837 tempest-ServerExternalEventsTest-336054206 tempest-ServerExternalEventsTest-336054206-project-member] Lock "d785944c-d65e-4a9e-91e5-0c0911b25227" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.882s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 728.309190] env[69367]: DEBUG nova.scheduler.client.report [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 728.356251] env[69367]: DEBUG nova.network.neutron [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.613423] env[69367]: DEBUG nova.compute.manager [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 728.815256] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.210s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 728.819402] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.594s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 728.822300] env[69367]: INFO nova.compute.claims [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 728.851438] env[69367]: INFO nova.scheduler.client.report [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Deleted allocations for instance fa4a5dbc-b885-4439-8520-0bfff38438b3 [ 728.862459] env[69367]: INFO nova.compute.manager [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: de68478e-475a-45ef-9eed-44904fcfc1fd] Took 1.04 seconds to deallocate network for instance. 
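
[editor's note] The re-schedule loop visible above (ResourceProviderSyncFailed on instances d785944c-... and de68478e-...) traces back to the placement API rejecting the compute node's inventory update with HTTP 400 because DISK_GB max_unit was reported as 0, while placement's inventory schema requires a minimum of 1 (the schema fragment and the "0 is less than the minimum of 1" message are quoted verbatim in the 400 body earlier in this log). The following is a minimal stand-alone sketch, not Nova or placement code: it only reuses the schema fragment echoed in the error and the DISK_GB inventory shown in the log to reproduce the same validation failure locally with the jsonschema library.

```python
# Minimal sketch (assumption: stand-alone reproduction, not Nova/placement source).
# The 'minimum': 1 constraint on max_unit is quoted verbatim in the 400 response above;
# the payload mirrors the DISK_GB inventory the resource tracker tried to push.
import jsonschema

# Fragment of the placement inventory schema as echoed in the error message.
MAX_UNIT_SCHEMA = {
    "type": "object",
    "properties": {
        "inventories": {
            "type": "object",
            "patternProperties": {
                "^[A-Z0-9_]+$": {
                    "type": "object",
                    "properties": {
                        "max_unit": {
                            "type": "integer",
                            "maximum": 2147483647,
                            "minimum": 1,
                        },
                    },
                },
            },
        },
    },
}

# Inventory payload analogous to the one rejected in the log (DISK_GB max_unit == 0).
payload = {
    "inventories": {
        "DISK_GB": {
            "total": 400,
            "reserved": 0,
            "min_unit": 1,
            "max_unit": 0,
            "step_size": 1,
            "allocation_ratio": 1.0,
        },
    },
}

try:
    jsonschema.validate(instance=payload, schema=MAX_UNIT_SCHEMA)
except jsonschema.ValidationError as exc:
    # Prints "0 is less than the minimum of 1" -- the same message placement
    # returned as the HTTP 400 detail in this log.
    print(exc.message)
```

Note that the periodic inventory refreshes in this same log report DISK_GB max_unit as 1, so the only value that differs in the rejected update is max_unit dropping to 0; the quantity to investigate on the compute host is therefore the largest allocatable DISK_GB chunk it derives for the datastore, which is a log-based observation rather than a confirmed root cause.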
[ 729.147872] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.360062] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8034ed55-f905-44d0-a144-125588b682dc tempest-ServerTagsTestJSON-810630044 tempest-ServerTagsTestJSON-810630044-project-member] Lock "fa4a5dbc-b885-4439-8520-0bfff38438b3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.272s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 729.911365] env[69367]: INFO nova.scheduler.client.report [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Deleted allocations for instance de68478e-475a-45ef-9eed-44904fcfc1fd [ 730.396753] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f2536db-5491-4b46-a8fe-4331d8935a66 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.408157] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-776fa674-0b46-4a50-9c4b-d9c9fd9fbd3c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.449928] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b7f7cb48-f31d-4f35-8ea5-66b5e5394715 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Lock "de68478e-475a-45ef-9eed-44904fcfc1fd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.332s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 730.453220] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ade526a4-7fe8-4dcf-93c9-0601652f0a78 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.465264] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8b44370-42fb-49de-9f4b-c328c8e99ad5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.484026] env[69367]: DEBUG nova.compute.provider_tree [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 730.959098] env[69367]: DEBUG nova.compute.manager [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 730.987272] env[69367]: DEBUG nova.scheduler.client.report [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 731.489784] env[69367]: DEBUG oslo_concurrency.lockutils [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 731.495023] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.676s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 731.495594] env[69367]: DEBUG nova.compute.manager [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Start building networks asynchronously for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 731.498620] env[69367]: DEBUG oslo_concurrency.lockutils [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.286s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 731.500183] env[69367]: INFO nova.compute.claims [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 732.008343] env[69367]: DEBUG nova.compute.utils [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 732.014702] env[69367]: DEBUG nova.compute.manager [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Not allocating networking since 'none' was specified. 
{{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 732.452962] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Acquiring lock "d7009e78-b9f4-47e8-ba29-dfc710bef8ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 732.453341] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Lock "d7009e78-b9f4-47e8-ba29-dfc710bef8ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.515462] env[69367]: DEBUG nova.compute.manager [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Start building block device mappings for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 733.082356] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9184e1d8-1112-48d7-9db4-b60a6c7708b6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.091591] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e9122f-c6b7-410b-8872-88ad4743efc1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.128683] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89c2c97c-e319-42d4-a6a5-9018169ed73d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.137254] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ffaf5c-1de2-4978-a66d-8f6296e8de75 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.155941] env[69367]: DEBUG nova.compute.provider_tree [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 733.533358] env[69367]: DEBUG nova.compute.manager [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Start spawning the instance on the hypervisor. 
{{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 733.563364] env[69367]: DEBUG nova.virt.hardware [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 733.563507] env[69367]: DEBUG nova.virt.hardware [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 733.563621] env[69367]: DEBUG nova.virt.hardware [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 733.563806] env[69367]: DEBUG nova.virt.hardware [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 733.563952] env[69367]: DEBUG nova.virt.hardware [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 733.564467] env[69367]: DEBUG nova.virt.hardware [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 733.565465] env[69367]: DEBUG nova.virt.hardware [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 733.565806] env[69367]: DEBUG nova.virt.hardware [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 733.566068] env[69367]: DEBUG nova.virt.hardware [None 
req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 733.566263] env[69367]: DEBUG nova.virt.hardware [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 733.566469] env[69367]: DEBUG nova.virt.hardware [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 733.567374] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b316abb7-01d0-4add-9de4-10282e9ab156 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.579017] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ce5787-42d0-4bf3-85a7-c0701dec1bad {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.594776] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Instance VIF info [] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 733.600757] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Creating folder: Project (45ed50a35bbb49dfb51ad8a8e4d4cd6a). Parent ref: group-v837645. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 733.601152] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0464145b-3bcb-4fc6-86a4-173931728af0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.613435] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Created folder: Project (45ed50a35bbb49dfb51ad8a8e4d4cd6a) in parent group-v837645. [ 733.613662] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Creating folder: Instances. Parent ref: group-v837709. 
{{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 733.613935] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81586df9-b11d-4388-a418-e6896ba046f2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.625800] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Created folder: Instances in parent group-v837709. [ 733.626100] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 733.626323] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 733.626627] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8d5ce65c-3b51-40cc-b298-df8447f24623 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.645662] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 733.645662] env[69367]: value = "task-4233915" [ 733.645662] env[69367]: _type = "Task" [ 733.645662] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.654563] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233915, 'name': CreateVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.658713] env[69367]: DEBUG nova.scheduler.client.report [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 734.161534] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233915, 'name': CreateVM_Task, 'duration_secs': 0.351683} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.162816] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 734.163888] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.164305] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 734.167229] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 734.167229] env[69367]: DEBUG oslo_concurrency.lockutils [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.668s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 734.167229] env[69367]: DEBUG nova.compute.manager [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Start building networks asynchronously for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 734.170983] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72f0d704-419e-497d-800d-04fbd8c71e6c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.175298] env[69367]: DEBUG oslo_concurrency.lockutils [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.524s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 734.175736] env[69367]: DEBUG oslo_concurrency.lockutils [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 734.176156] env[69367]: INFO nova.compute.manager [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: c17525ee-d038-4c81-932b-ed74a6de6cb5] Successfully reverted task state from None on failure for instance. [ 734.179827] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.484s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 734.183139] env[69367]: INFO nova.compute.claims [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 734.191090] env[69367]: ERROR oslo_messaging.rpc.server [None req-02258f01-0f7a-47d5-8f19-e7e1d8fc8a57 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 734.191090] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 734.191090] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 734.191090] env[69367]: ERROR oslo_messaging.rpc.server yield [ 734.191090] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 734.191090] env[69367]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 734.191090] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 734.191090] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 734.191090] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-c40d6371-86b9-4fe9-80af-cced8aac3b7b"}]} [ 734.191090] env[69367]: ERROR oslo_messaging.rpc.server [ 734.191607] env[69367]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 734.191607] env[69367]: ERROR oslo_messaging.rpc.server [ 734.191607] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 734.191607] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 734.191607] env[69367]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 734.191607] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 734.191607] env[69367]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 734.191607] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 734.191607] env[69367]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 734.191607] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 734.191607] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 734.191607] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 734.191607] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 734.191607] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 734.191607] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 734.191607] env[69367]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 734.191607] env[69367]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 734.191607] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 734.192386] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 734.192386] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 734.192386] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 734.192386] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 734.192386] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 734.192386] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 734.192386] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 734.192386] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 734.192386] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 734.192386] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 734.192386] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 734.192386] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 734.192386] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 734.192386] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 734.192386] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 734.192386] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 734.192386] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 734.192386] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 734.193152] env[69367]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 734.193152] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 734.193152] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 734.193152] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 734.193152] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 734.193152] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 734.193152] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 734.193152] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 734.193152] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 
734.193152] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 734.193152] env[69367]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 734.193152] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 734.193152] env[69367]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 734.193152] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 734.193152] env[69367]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 734.193152] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 734.193152] env[69367]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 734.193152] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 734.193926] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 734.193926] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 734.193926] env[69367]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 734.193926] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 734.193926] env[69367]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 734.193926] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 734.193926] env[69367]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 734.193926] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 734.193926] env[69367]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 734.193926] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 734.193926] env[69367]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 734.193926] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 734.193926] env[69367]: ERROR oslo_messaging.rpc.server raise value [ 734.193926] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 734.193926] env[69367]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 734.193926] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 734.193926] env[69367]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 734.195024] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 734.195024] env[69367]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 734.195024] env[69367]: ERROR oslo_messaging.rpc.server File 
"/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 734.195024] env[69367]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 734.195024] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 734.195024] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 734.195024] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 734.195024] env[69367]: ERROR oslo_messaging.rpc.server [ 734.199025] env[69367]: DEBUG oslo_vmware.api [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Waiting for the task: (returnval){ [ 734.199025] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52f8efe5-acb4-bce4-202f-313cdfeed386" [ 734.199025] env[69367]: _type = "Task" [ 734.199025] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.211948] env[69367]: DEBUG oslo_vmware.api [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52f8efe5-acb4-bce4-202f-313cdfeed386, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.621662] env[69367]: DEBUG oslo_concurrency.lockutils [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Acquiring lock "f0522b69-b593-404e-8f24-b6c5c6c8b2e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 734.621662] env[69367]: DEBUG oslo_concurrency.lockutils [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Lock "f0522b69-b593-404e-8f24-b6c5c6c8b2e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 734.678040] env[69367]: DEBUG nova.compute.utils [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 734.680357] env[69367]: DEBUG nova.compute.manager [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Allocating IP information in the background. 
{{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 734.680467] env[69367]: DEBUG nova.network.neutron [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 734.712380] env[69367]: DEBUG oslo_vmware.api [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52f8efe5-acb4-bce4-202f-313cdfeed386, 'name': SearchDatastore_Task, 'duration_secs': 0.015414} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.713163] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 734.713601] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 734.714141] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.714421] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 734.714572] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 734.714906] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17092d77-6be8-4d3b-b520-c6217919e95d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.728701] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 734.728900] env[69367]: 
DEBUG nova.virt.vmwareapi.vmops [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 734.731592] env[69367]: DEBUG nova.policy [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38e470e859504c7c9f283fcbcdc0dc0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82caa54483a54af1870eab2fb0d6ca2c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 734.733406] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e980ac59-8384-4634-8746-35e41f161099 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.743764] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] Acquiring lock "3f6a67a9-08db-4a15-ae07-bef02b9a6d48" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 734.744062] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] Lock "3f6a67a9-08db-4a15-ae07-bef02b9a6d48" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 734.744368] env[69367]: DEBUG oslo_vmware.api [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Waiting for the task: (returnval){ [ 734.744368] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52a4e82d-d4dc-e1a1-ce44-d8da9878e267" [ 734.744368] env[69367]: _type = "Task" [ 734.744368] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.759449] env[69367]: DEBUG oslo_vmware.api [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52a4e82d-d4dc-e1a1-ce44-d8da9878e267, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.039576] env[69367]: DEBUG nova.network.neutron [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Successfully created port: b4b0bb3d-a7c4-42e8-99f2-9078bf5e2b6c {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 735.185366] env[69367]: DEBUG nova.compute.manager [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Start building block device mappings for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 735.212795] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Acquiring lock "17ffa2b1-4a0a-4e14-a7b0-104791adf072" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.213096] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Lock "17ffa2b1-4a0a-4e14-a7b0-104791adf072" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 735.259380] env[69367]: DEBUG oslo_vmware.api [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52a4e82d-d4dc-e1a1-ce44-d8da9878e267, 'name': SearchDatastore_Task, 'duration_secs': 0.015869} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.260371] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70ea6570-9557-484a-9bca-f0de34b780f4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.269862] env[69367]: DEBUG oslo_vmware.api [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Waiting for the task: (returnval){ [ 735.269862] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52e8d8c4-42ea-3bc7-ba9b-a82037f45eee" [ 735.269862] env[69367]: _type = "Task" [ 735.269862] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.280036] env[69367]: DEBUG oslo_vmware.api [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52e8d8c4-42ea-3bc7-ba9b-a82037f45eee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.677478] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0173e738-73f9-4276-9c7a-21a3c2837203 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.686116] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de1f962-8ada-4dd9-980b-ae079a98e637 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.722881] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86bc9c3c-a951-4e02-afbd-43b7cb4882f3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.731751] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e642bc5-2d9d-438d-89f4-3083b16f343a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.753525] env[69367]: DEBUG nova.compute.provider_tree [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 735.784858] env[69367]: DEBUG oslo_vmware.api [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52e8d8c4-42ea-3bc7-ba9b-a82037f45eee, 'name': SearchDatastore_Task, 'duration_secs': 0.0173} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.785850] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 735.785850] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 011ab7de-98a7-41fc-9e05-e71965c73c09/011ab7de-98a7-41fc-9e05-e71965c73c09.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 735.785850] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b476733f-183e-48e3-b692-1761cc2e84eb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.797425] env[69367]: DEBUG oslo_vmware.api [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Waiting for the task: (returnval){ [ 735.797425] env[69367]: value = "task-4233916" [ 735.797425] env[69367]: _type = "Task" [ 735.797425] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.805965] env[69367]: DEBUG oslo_vmware.api [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233916, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.196020] env[69367]: DEBUG nova.compute.manager [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Start spawning the instance on the hypervisor. 
{{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 736.226888] env[69367]: DEBUG nova.virt.hardware [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 736.227191] env[69367]: DEBUG nova.virt.hardware [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 736.227407] env[69367]: DEBUG nova.virt.hardware [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 736.227542] env[69367]: DEBUG nova.virt.hardware [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 736.227696] env[69367]: DEBUG nova.virt.hardware [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 736.227890] env[69367]: DEBUG nova.virt.hardware [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 736.228192] env[69367]: DEBUG nova.virt.hardware [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 736.228392] env[69367]: DEBUG nova.virt.hardware [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 736.228574] env[69367]: DEBUG nova.virt.hardware [None 
req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 736.228765] env[69367]: DEBUG nova.virt.hardware [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 736.228991] env[69367]: DEBUG nova.virt.hardware [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 736.230204] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d08d30a-9274-4e49-930e-d2b40dd15420 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.240398] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69543d09-d4bb-4713-a972-015dc1441ab3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.257540] env[69367]: DEBUG nova.scheduler.client.report [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 736.309159] env[69367]: DEBUG oslo_vmware.api [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233916, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.518129] env[69367]: DEBUG oslo_vmware.rw_handles [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b7c0c8-67fa-649c-5553-bc3fcfa3f840/disk-0.vmdk. 
{{(pid=69367) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 736.519207] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0274c5ec-ed36-4394-9b9b-a05891319e03 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.525980] env[69367]: DEBUG oslo_vmware.rw_handles [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b7c0c8-67fa-649c-5553-bc3fcfa3f840/disk-0.vmdk is in state: ready. {{(pid=69367) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 736.526198] env[69367]: ERROR oslo_vmware.rw_handles [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b7c0c8-67fa-649c-5553-bc3fcfa3f840/disk-0.vmdk due to incomplete transfer. [ 736.526461] env[69367]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4cf44582-0081-4dc3-9cf3-d33ac627a1e7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.535865] env[69367]: DEBUG oslo_vmware.rw_handles [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b7c0c8-67fa-649c-5553-bc3fcfa3f840/disk-0.vmdk. {{(pid=69367) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 736.535865] env[69367]: DEBUG nova.virt.vmwareapi.images [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Uploaded image 38eb2f32-4943-4313-9612-8fd99366b9f1 to the Glance image server {{(pid=69367) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 736.537297] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Destroying the VM {{(pid=69367) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 736.537297] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-fb570532-a2f2-4be7-b510-3e36eef4a436 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.543699] env[69367]: DEBUG oslo_vmware.api [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 736.543699] env[69367]: value = "task-4233917" [ 736.543699] env[69367]: _type = "Task" [ 736.543699] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.555252] env[69367]: DEBUG oslo_vmware.api [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233917, 'name': Destroy_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.743202] env[69367]: DEBUG nova.compute.manager [req-f18fb611-c7a2-4643-98d2-35ad945b1bbb req-428318aa-3677-4bac-bbd8-ada07b145e87 service nova] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Received event network-vif-plugged-b4b0bb3d-a7c4-42e8-99f2-9078bf5e2b6c {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 736.743457] env[69367]: DEBUG oslo_concurrency.lockutils [req-f18fb611-c7a2-4643-98d2-35ad945b1bbb req-428318aa-3677-4bac-bbd8-ada07b145e87 service nova] Acquiring lock "f8c07fa1-d27c-4d0f-847b-481477cd04bf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 736.743684] env[69367]: DEBUG oslo_concurrency.lockutils [req-f18fb611-c7a2-4643-98d2-35ad945b1bbb req-428318aa-3677-4bac-bbd8-ada07b145e87 service nova] Lock "f8c07fa1-d27c-4d0f-847b-481477cd04bf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 736.743864] env[69367]: DEBUG oslo_concurrency.lockutils [req-f18fb611-c7a2-4643-98d2-35ad945b1bbb req-428318aa-3677-4bac-bbd8-ada07b145e87 service nova] Lock "f8c07fa1-d27c-4d0f-847b-481477cd04bf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 736.744087] env[69367]: DEBUG nova.compute.manager [req-f18fb611-c7a2-4643-98d2-35ad945b1bbb req-428318aa-3677-4bac-bbd8-ada07b145e87 service nova] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] No waiting events found dispatching network-vif-plugged-b4b0bb3d-a7c4-42e8-99f2-9078bf5e2b6c {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 736.744305] env[69367]: WARNING nova.compute.manager [req-f18fb611-c7a2-4643-98d2-35ad945b1bbb req-428318aa-3677-4bac-bbd8-ada07b145e87 service nova] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Received unexpected event network-vif-plugged-b4b0bb3d-a7c4-42e8-99f2-9078bf5e2b6c for instance with vm_state building and task_state spawning. 
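The ResourceProviderSyncFailed traceback above (and the identical one that follows below) report the same underlying placement rejection: the inventory update for provider 19ddf8be-7305-4f70-8366-52a9957232e6 carried DISK_GB max_unit = 0, and placement's inventory schema, quoted verbatim in the 400 response, requires max_unit to be an integer of at least 1. The periodic "Inventory has not changed" reports in the same window show DISK_GB with total 400 and max_unit 1, while the rejected updates reported 0 (the log does not show where the zero came from). Because the failure is re-raised from the RPC handler inside terminate_instance, the delete paths above fail at the placement-sync step and Nova logs "Successfully reverted task state from None on failure". The snippet below is a minimal standalone sketch of the check placement applied, using the python-jsonschema library rather than placement itself; the schema fragment and the offending value are copied from the error detail in the log, and everything else is illustrative.

import jsonschema  # third-party library: pip install jsonschema

# Schema fragment quoted in the 400 response; in placement it sits under
# properties -> inventories -> patternProperties -> '^[A-Z0-9_]+$' -> max_unit.
MAX_UNIT_SCHEMA = {'type': 'integer', 'maximum': 2147483647, 'minimum': 1}

# DISK_GB entry as reported in the rejected update; max_unit = 0 is the
# offending value (the periodic reports show max_unit = 1 for the same provider).
disk_gb = {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0,
           'step_size': 1, 'allocation_ratio': 1.0}

try:
    jsonschema.validate(disk_gb['max_unit'], MAX_UNIT_SCHEMA)
except jsonschema.ValidationError as exc:
    # Prints "0 is less than the minimum of 1", matching the detail in the log.
    print(exc.message)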
[ 736.755080] env[69367]: DEBUG nova.network.neutron [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Successfully updated port: b4b0bb3d-a7c4-42e8-99f2-9078bf5e2b6c {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 736.766455] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.587s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 736.767011] env[69367]: DEBUG nova.compute.manager [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Start building networks asynchronously for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 736.771874] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.395s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 736.772131] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 736.772317] env[69367]: INFO nova.compute.manager [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab365570-ac29-4094-be4c-d49563a465c8] Successfully reverted task state from None on failure for instance. [ 736.777326] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.346s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 736.779925] env[69367]: INFO nova.compute.claims [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: a0b99237-8f23-40ec-827f-af75961a096d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 736.787035] env[69367]: ERROR oslo_messaging.rpc.server [None req-91e9d1ea-2ff5-4ae3-a101-b6ada7c4acd5 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 736.787035] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 736.787035] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 736.787035] env[69367]: ERROR oslo_messaging.rpc.server yield [ 736.787035] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 736.787035] env[69367]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 736.787035] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 736.787035] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 736.787035] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-b74434d3-8f97-4429-96a0-96dc06eb3f1c"}]} [ 736.787035] env[69367]: ERROR oslo_messaging.rpc.server [ 736.787474] env[69367]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 736.787474] env[69367]: ERROR oslo_messaging.rpc.server [ 736.787474] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 736.787474] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 736.787474] env[69367]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 736.787474] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 736.787474] env[69367]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 736.787474] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 736.787474] env[69367]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 736.787474] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 736.787474] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 736.787474] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 736.787474] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 736.787474] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 736.787474] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 736.787474] env[69367]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 736.787474] env[69367]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 736.787474] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 736.788089] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 736.788089] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 736.788089] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 736.788089] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 736.788089] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 736.788089] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 736.788089] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 736.788089] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 736.788089] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 736.788089] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 736.788089] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 736.788089] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 736.788089] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 736.788089] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 736.788089] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 736.788089] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 736.788089] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 736.788089] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 736.788567] env[69367]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 736.788567] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 736.788567] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 736.788567] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 736.788567] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 736.788567] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 736.788567] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 736.788567] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 736.788567] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 
736.788567] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 736.788567] env[69367]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 736.788567] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 736.788567] env[69367]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 736.788567] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 736.788567] env[69367]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 736.788567] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 736.788567] env[69367]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 736.788567] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 736.789047] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 736.789047] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 736.789047] env[69367]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 736.789047] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 736.789047] env[69367]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 736.789047] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 736.789047] env[69367]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 736.789047] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 736.789047] env[69367]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 736.789047] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 736.789047] env[69367]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 736.789047] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 736.789047] env[69367]: ERROR oslo_messaging.rpc.server raise value [ 736.789047] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 736.789047] env[69367]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 736.789047] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 736.789047] env[69367]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 736.789468] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 736.789468] env[69367]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 736.789468] env[69367]: ERROR oslo_messaging.rpc.server File 
"/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 736.789468] env[69367]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 736.789468] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 736.789468] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 736.789468] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 736.789468] env[69367]: ERROR oslo_messaging.rpc.server [ 736.811945] env[69367]: DEBUG oslo_vmware.api [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233916, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.632921} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.812283] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 011ab7de-98a7-41fc-9e05-e71965c73c09/011ab7de-98a7-41fc-9e05-e71965c73c09.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 736.812504] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 736.812760] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-57443d04-3422-46cc-a2d0-e548ea156d71 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.823591] env[69367]: DEBUG oslo_vmware.api [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Waiting for the task: (returnval){ [ 736.823591] env[69367]: value = "task-4233918" [ 736.823591] env[69367]: _type = "Task" [ 736.823591] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.834875] env[69367]: DEBUG oslo_vmware.api [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233918, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.054322] env[69367]: DEBUG oslo_vmware.api [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233917, 'name': Destroy_Task} progress is 100%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.261587] env[69367]: DEBUG oslo_concurrency.lockutils [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Acquiring lock "refresh_cache-f8c07fa1-d27c-4d0f-847b-481477cd04bf" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.261875] env[69367]: DEBUG oslo_concurrency.lockutils [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Acquired lock "refresh_cache-f8c07fa1-d27c-4d0f-847b-481477cd04bf" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 737.262327] env[69367]: DEBUG nova.network.neutron [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 737.273634] env[69367]: DEBUG nova.compute.utils [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 737.275024] env[69367]: DEBUG nova.compute.manager [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Allocating IP information in the background. {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 737.275238] env[69367]: DEBUG nova.network.neutron [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 737.320903] env[69367]: DEBUG nova.policy [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5360c8b93a954bd0832ebadea6983ef1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f5bc3d470905412ea72a8eedb98e9e47', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 737.334194] env[69367]: DEBUG oslo_vmware.api [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233918, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069687} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.334518] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 737.335371] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-754bb1e6-4448-435f-9896-fdfd8989465f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.356135] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Reconfiguring VM instance instance-0000002a to attach disk [datastore2] 011ab7de-98a7-41fc-9e05-e71965c73c09/011ab7de-98a7-41fc-9e05-e71965c73c09.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 737.358442] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99c2443b-477e-4077-9bd5-0521e1f16bd1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.382277] env[69367]: DEBUG oslo_vmware.api [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Waiting for the task: (returnval){ [ 737.382277] env[69367]: value = "task-4233919" [ 737.382277] env[69367]: _type = "Task" [ 737.382277] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.391633] env[69367]: DEBUG oslo_vmware.api [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233919, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.556348] env[69367]: DEBUG oslo_vmware.api [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233917, 'name': Destroy_Task} progress is 100%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.680463] env[69367]: DEBUG nova.network.neutron [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Successfully created port: 00ad3cfd-f282-442d-b152-85e841dd8a16 {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 737.778068] env[69367]: DEBUG nova.compute.manager [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Start building block device mappings for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 737.839083] env[69367]: DEBUG nova.network.neutron [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 737.900537] env[69367]: DEBUG oslo_vmware.api [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233919, 'name': ReconfigVM_Task, 'duration_secs': 0.329538} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.903821] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Reconfigured VM instance instance-0000002a to attach disk [datastore2] 011ab7de-98a7-41fc-9e05-e71965c73c09/011ab7de-98a7-41fc-9e05-e71965c73c09.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 737.905711] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-181ef57d-8792-4369-8ecc-bf6abb3d8f7d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.917890] env[69367]: DEBUG oslo_vmware.api [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Waiting for the task: (returnval){ [ 737.917890] env[69367]: value = "task-4233920" [ 737.917890] env[69367]: _type = "Task" [ 737.917890] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.933977] env[69367]: DEBUG oslo_vmware.api [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233920, 'name': Rename_Task} progress is 14%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.067722] env[69367]: DEBUG oslo_vmware.api [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233917, 'name': Destroy_Task, 'duration_secs': 1.347542} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.073284] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Destroyed the VM [ 738.073663] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Deleting Snapshot of the VM instance {{(pid=69367) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 738.074615] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-9d41381d-fd50-46d4-ae0d-fe83b30343eb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.083423] env[69367]: DEBUG oslo_vmware.api [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 738.083423] env[69367]: value = "task-4233921" [ 738.083423] env[69367]: _type = "Task" [ 738.083423] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.096217] env[69367]: DEBUG oslo_vmware.api [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233921, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.122151] env[69367]: DEBUG nova.network.neutron [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Updating instance_info_cache with network_info: [{"id": "b4b0bb3d-a7c4-42e8-99f2-9078bf5e2b6c", "address": "fa:16:3e:6c:36:d9", "network": {"id": "32a4b064-9027-4a4e-9230-c450c44581e8", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1766727980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "82caa54483a54af1870eab2fb0d6ca2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88651df2-0506-4f6c-b868-dd30a81f2b1c", "external-id": "nsx-vlan-transportzone-366", "segmentation_id": 366, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4b0bb3d-a7", "ovs_interfaceid": "b4b0bb3d-a7c4-42e8-99f2-9078bf5e2b6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.357683] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7cdbc43-9008-4e7b-825d-1e50e2cd3057 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.366507] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3ea9e01-d771-48aa-bfd6-687277feb182 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.405774] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24784760-f353-47c4-aa3a-399f83478f6a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.418886] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e93c885c-26c0-414a-a08b-762cdb8213ca {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.436999] env[69367]: DEBUG oslo_vmware.api [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233920, 'name': Rename_Task, 'duration_secs': 0.167663} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.448320] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 738.448902] env[69367]: DEBUG nova.compute.provider_tree [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 738.450361] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ad539ee-d164-4c53-9c5c-1744050db4cc {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.457782] env[69367]: DEBUG oslo_vmware.api [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Waiting for the task: (returnval){ [ 738.457782] env[69367]: value = "task-4233922" [ 738.457782] env[69367]: _type = "Task" [ 738.457782] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.466581] env[69367]: DEBUG oslo_vmware.api [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233922, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.595117] env[69367]: DEBUG oslo_vmware.api [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233921, 'name': RemoveSnapshot_Task, 'duration_secs': 0.413448} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.595117] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Deleted Snapshot of the VM instance {{(pid=69367) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 738.595340] env[69367]: DEBUG nova.compute.manager [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 738.596905] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1f19c2b-0866-472f-a5f0-2350b5e7beb6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.625602] env[69367]: DEBUG oslo_concurrency.lockutils [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Releasing lock "refresh_cache-f8c07fa1-d27c-4d0f-847b-481477cd04bf" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.626048] env[69367]: DEBUG nova.compute.manager [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Instance network_info: |[{"id": "b4b0bb3d-a7c4-42e8-99f2-9078bf5e2b6c", "address": "fa:16:3e:6c:36:d9", "network": {"id": "32a4b064-9027-4a4e-9230-c450c44581e8", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1766727980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "82caa54483a54af1870eab2fb0d6ca2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88651df2-0506-4f6c-b868-dd30a81f2b1c", "external-id": "nsx-vlan-transportzone-366", "segmentation_id": 366, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4b0bb3d-a7", "ovs_interfaceid": "b4b0bb3d-a7c4-42e8-99f2-9078bf5e2b6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 738.626644] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6c:36:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '88651df2-0506-4f6c-b868-dd30a81f2b1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b4b0bb3d-a7c4-42e8-99f2-9078bf5e2b6c', 'vif_model': 'vmxnet3'}] 
{{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 738.634468] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Creating folder: Project (82caa54483a54af1870eab2fb0d6ca2c). Parent ref: group-v837645. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 738.634843] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-87f1954a-0bfa-4d8c-b44b-910ae60e1456 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.647370] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Created folder: Project (82caa54483a54af1870eab2fb0d6ca2c) in parent group-v837645. [ 738.647736] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Creating folder: Instances. Parent ref: group-v837712. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 738.648125] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a68f9bed-d533-4527-8fe3-8fd3e02a92c2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.661102] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Created folder: Instances in parent group-v837712. [ 738.661394] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 738.661606] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 738.661813] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7c0d7f0e-7969-48d4-a23e-d5361136e25f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.685278] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 738.685278] env[69367]: value = "task-4233925" [ 738.685278] env[69367]: _type = "Task" [ 738.685278] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.695050] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233925, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.769312] env[69367]: DEBUG nova.compute.manager [req-508be30b-14fd-4f8d-bf14-8784d4b6c13c req-d1c7f59c-1a29-43c3-a926-9e56a5c0e678 service nova] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Received event network-changed-b4b0bb3d-a7c4-42e8-99f2-9078bf5e2b6c {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 738.769312] env[69367]: DEBUG nova.compute.manager [req-508be30b-14fd-4f8d-bf14-8784d4b6c13c req-d1c7f59c-1a29-43c3-a926-9e56a5c0e678 service nova] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Refreshing instance network info cache due to event network-changed-b4b0bb3d-a7c4-42e8-99f2-9078bf5e2b6c. {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 738.769716] env[69367]: DEBUG oslo_concurrency.lockutils [req-508be30b-14fd-4f8d-bf14-8784d4b6c13c req-d1c7f59c-1a29-43c3-a926-9e56a5c0e678 service nova] Acquiring lock "refresh_cache-f8c07fa1-d27c-4d0f-847b-481477cd04bf" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.769905] env[69367]: DEBUG oslo_concurrency.lockutils [req-508be30b-14fd-4f8d-bf14-8784d4b6c13c req-d1c7f59c-1a29-43c3-a926-9e56a5c0e678 service nova] Acquired lock "refresh_cache-f8c07fa1-d27c-4d0f-847b-481477cd04bf" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 738.770178] env[69367]: DEBUG nova.network.neutron [req-508be30b-14fd-4f8d-bf14-8784d4b6c13c req-d1c7f59c-1a29-43c3-a926-9e56a5c0e678 service nova] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Refreshing network info cache for port b4b0bb3d-a7c4-42e8-99f2-9078bf5e2b6c {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 738.791406] env[69367]: DEBUG nova.compute.manager [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Start spawning the instance on the hypervisor. 
{{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 738.818994] env[69367]: DEBUG nova.virt.hardware [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 738.819363] env[69367]: DEBUG nova.virt.hardware [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 738.819609] env[69367]: DEBUG nova.virt.hardware [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 738.819866] env[69367]: DEBUG nova.virt.hardware [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 738.820135] env[69367]: DEBUG nova.virt.hardware [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 738.820373] env[69367]: DEBUG nova.virt.hardware [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 738.820664] env[69367]: DEBUG nova.virt.hardware [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 738.820976] env[69367]: DEBUG nova.virt.hardware [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 738.821173] 
env[69367]: DEBUG nova.virt.hardware [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 738.821426] env[69367]: DEBUG nova.virt.hardware [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 738.821678] env[69367]: DEBUG nova.virt.hardware [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 738.822802] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af8dcdf5-855f-4ac1-bc2e-f2e91ee7ee39 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.832667] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-873e44a0-6a38-4068-9f67-8f3cfedc7ac7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.968596] env[69367]: DEBUG oslo_vmware.api [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233922, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.974767] env[69367]: ERROR nova.scheduler.client.report [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [req-4daa612c-7360-42c2-ad63-dfd23659f490] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-4daa612c-7360-42c2-ad63-dfd23659f490"}]} [ 738.975180] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.198s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 738.975815] env[69367]: ERROR nova.compute.manager [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: a0b99237-8f23-40ec-827f-af75961a096d] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 738.975815] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] Traceback (most recent call last): [ 738.975815] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 738.975815] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] yield [ 738.975815] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 738.975815] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] self.set_inventory_for_provider( [ 738.975815] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 738.975815] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 738.976046] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-4daa612c-7360-42c2-ad63-dfd23659f490"}]} [ 738.976046] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] [ 738.976046] env[69367]: ERROR nova.compute.manager [instance: 
a0b99237-8f23-40ec-827f-af75961a096d] During handling of the above exception, another exception occurred: [ 738.976046] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] [ 738.976046] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] Traceback (most recent call last): [ 738.976046] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 738.976046] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] with self.rt.instance_claim(context, instance, node, allocs, [ 738.976046] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 738.976046] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] return f(*args, **kwargs) [ 738.976303] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 738.976303] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] self._update(elevated, cn) [ 738.976303] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 738.976303] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] self._update_to_placement(context, compute_node, startup) [ 738.976303] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 738.976303] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 738.976303] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 738.976303] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] return attempt.get(self._wrap_exception) [ 738.976303] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 738.976303] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] six.reraise(self.value[0], self.value[1], self.value[2]) [ 738.976303] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 738.976303] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] raise value [ 738.976303] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 738.976620] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 738.976620] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in 
_update_to_placement [ 738.976620] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] self.reportclient.update_from_provider_tree( [ 738.976620] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 738.976620] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] with catch_all(pd.uuid): [ 738.976620] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 738.976620] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] self.gen.throw(typ, value, traceback) [ 738.976620] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 738.976620] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] raise exception.ResourceProviderSyncFailed() [ 738.976620] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 738.976620] env[69367]: ERROR nova.compute.manager [instance: a0b99237-8f23-40ec-827f-af75961a096d] [ 738.976927] env[69367]: DEBUG nova.compute.utils [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: a0b99237-8f23-40ec-827f-af75961a096d] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 738.977692] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.048s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 738.977913] env[69367]: DEBUG nova.objects.instance [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Lazy-loading 'resources' on Instance uuid 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 738.979353] env[69367]: DEBUG nova.compute.manager [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: a0b99237-8f23-40ec-827f-af75961a096d] Build of instance a0b99237-8f23-40ec-827f-af75961a096d was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 738.980046] env[69367]: DEBUG nova.compute.manager [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: a0b99237-8f23-40ec-827f-af75961a096d] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 738.980046] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Acquiring lock "refresh_cache-a0b99237-8f23-40ec-827f-af75961a096d" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.980170] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Acquired lock "refresh_cache-a0b99237-8f23-40ec-827f-af75961a096d" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 738.980294] env[69367]: DEBUG nova.network.neutron [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: a0b99237-8f23-40ec-827f-af75961a096d] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 739.108990] env[69367]: INFO nova.compute.manager [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Shelve offloading [ 739.197128] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233925, 'name': CreateVM_Task, 'duration_secs': 0.402682} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.197367] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 739.198107] env[69367]: DEBUG oslo_concurrency.lockutils [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.198291] env[69367]: DEBUG oslo_concurrency.lockutils [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 739.198613] env[69367]: DEBUG oslo_concurrency.lockutils [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 739.198890] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd802458-d396-4670-b4e7-d547a18b03d7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.205061] env[69367]: DEBUG oslo_vmware.api [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Waiting for the task: (returnval){ [ 739.205061] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52439cfe-c7c2-65fd-258c-4eb267877151" [ 739.205061] env[69367]: _type = "Task" [ 739.205061] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.216581] env[69367]: DEBUG oslo_vmware.api [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52439cfe-c7c2-65fd-258c-4eb267877151, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.476920] env[69367]: DEBUG oslo_vmware.api [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233922, 'name': PowerOnVM_Task, 'duration_secs': 0.555078} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.480359] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 739.480636] env[69367]: INFO nova.compute.manager [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Took 5.95 seconds to spawn the instance on the hypervisor. [ 739.480891] env[69367]: DEBUG nova.compute.manager [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 739.481810] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-866cfe46-d88c-4a71-90fc-88ca5939cf0b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.507351] env[69367]: DEBUG nova.network.neutron [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: a0b99237-8f23-40ec-827f-af75961a096d] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 739.524115] env[69367]: DEBUG nova.scheduler.client.report [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 739.552777] env[69367]: DEBUG nova.scheduler.client.report [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 739.552874] env[69367]: DEBUG nova.compute.provider_tree [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 739.574980] env[69367]: DEBUG nova.scheduler.client.report [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 739.601895] env[69367]: DEBUG nova.scheduler.client.report [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 739.613457] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 739.613803] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-522f94a4-c1fa-4ddd-81ab-a34bf4698b04 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.617509] env[69367]: DEBUG nova.network.neutron [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: a0b99237-8f23-40ec-827f-af75961a096d] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.622898] env[69367]: DEBUG oslo_vmware.api [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 739.622898] env[69367]: value = "task-4233926" [ 739.622898] env[69367]: _type = "Task" [ 739.622898] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.631923] env[69367]: DEBUG nova.network.neutron [req-508be30b-14fd-4f8d-bf14-8784d4b6c13c req-d1c7f59c-1a29-43c3-a926-9e56a5c0e678 service nova] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Updated VIF entry in instance network info cache for port b4b0bb3d-a7c4-42e8-99f2-9078bf5e2b6c. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 739.631923] env[69367]: DEBUG nova.network.neutron [req-508be30b-14fd-4f8d-bf14-8784d4b6c13c req-d1c7f59c-1a29-43c3-a926-9e56a5c0e678 service nova] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Updating instance_info_cache with network_info: [{"id": "b4b0bb3d-a7c4-42e8-99f2-9078bf5e2b6c", "address": "fa:16:3e:6c:36:d9", "network": {"id": "32a4b064-9027-4a4e-9230-c450c44581e8", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1766727980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "82caa54483a54af1870eab2fb0d6ca2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88651df2-0506-4f6c-b868-dd30a81f2b1c", "external-id": "nsx-vlan-transportzone-366", "segmentation_id": 366, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4b0bb3d-a7", "ovs_interfaceid": "b4b0bb3d-a7c4-42e8-99f2-9078bf5e2b6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.641470] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] VM already powered off {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 739.641584] env[69367]: DEBUG nova.compute.manager [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 739.643417] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b9c5541-acea-494b-a4bc-142233a254c6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.647749] env[69367]: DEBUG nova.network.neutron [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Successfully updated port: 00ad3cfd-f282-442d-b152-85e841dd8a16 {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 739.658907] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquiring lock "refresh_cache-8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.659119] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 
tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquired lock "refresh_cache-8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 739.659307] env[69367]: DEBUG nova.network.neutron [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 739.718499] env[69367]: DEBUG oslo_vmware.api [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52439cfe-c7c2-65fd-258c-4eb267877151, 'name': SearchDatastore_Task, 'duration_secs': 0.010383} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.721533] env[69367]: DEBUG oslo_concurrency.lockutils [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 739.721809] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 739.722149] env[69367]: DEBUG oslo_concurrency.lockutils [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.722318] env[69367]: DEBUG oslo_concurrency.lockutils [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 739.722504] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 739.722987] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-45f7570d-2ea2-458a-b5e5-a3d7adadd049 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.733199] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 
tempest-ServerRescueTestJSON-1132992793-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 739.733472] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 739.736990] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4152ab7-82a6-411e-9db0-b668548a6b5e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.744283] env[69367]: DEBUG oslo_vmware.api [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Waiting for the task: (returnval){ [ 739.744283] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]524216be-a642-4178-aba5-007162f45281" [ 739.744283] env[69367]: _type = "Task" [ 739.744283] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.756329] env[69367]: DEBUG oslo_vmware.api [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]524216be-a642-4178-aba5-007162f45281, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.019510] env[69367]: INFO nova.compute.manager [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Took 28.82 seconds to build instance. 
[ 740.070356] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73164933-1998-43e4-8d65-86475639c782 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.078628] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71efd4e7-b002-4094-a2d0-027459f80aad {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.111376] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9240634-d82c-4427-a64c-1d1dd2c0d97b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.119839] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06bb7570-d513-4326-90c1-63828261fa9b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.124600] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Releasing lock "refresh_cache-a0b99237-8f23-40ec-827f-af75961a096d" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 740.124742] env[69367]: DEBUG nova.compute.manager [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 740.124925] env[69367]: DEBUG nova.compute.manager [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: a0b99237-8f23-40ec-827f-af75961a096d] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 740.125109] env[69367]: DEBUG nova.network.neutron [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: a0b99237-8f23-40ec-827f-af75961a096d] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 740.137349] env[69367]: DEBUG oslo_concurrency.lockutils [req-508be30b-14fd-4f8d-bf14-8784d4b6c13c req-d1c7f59c-1a29-43c3-a926-9e56a5c0e678 service nova] Releasing lock "refresh_cache-f8c07fa1-d27c-4d0f-847b-481477cd04bf" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 740.137957] env[69367]: DEBUG nova.compute.provider_tree [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 740.154565] env[69367]: DEBUG nova.network.neutron [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: a0b99237-8f23-40ec-827f-af75961a096d] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 740.154565] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "refresh_cache-837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.154565] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquired lock "refresh_cache-837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 740.154565] env[69367]: DEBUG nova.network.neutron [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 740.255631] env[69367]: DEBUG oslo_vmware.api [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]524216be-a642-4178-aba5-007162f45281, 'name': SearchDatastore_Task, 'duration_secs': 0.01153} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.259369] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b991a630-8ee0-4691-bbe6-98ffd70bc45c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.267116] env[69367]: DEBUG oslo_vmware.api [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Waiting for the task: (returnval){ [ 740.267116] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]527046bf-1e08-e684-1662-bc2b70ac4696" [ 740.267116] env[69367]: _type = "Task" [ 740.267116] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.277521] env[69367]: DEBUG oslo_vmware.api [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]527046bf-1e08-e684-1662-bc2b70ac4696, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.448756] env[69367]: DEBUG nova.network.neutron [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Updating instance_info_cache with network_info: [{"id": "fa738fa1-0be4-4506-8e42-73671661dee1", "address": "fa:16:3e:c7:7c:7c", "network": {"id": "0b0a82c2-1a70-4174-a75e-5863d3505b2c", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1091682254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.159", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26a89ab4163e4b9a801dcbf11c953cf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa738fa1-0b", "ovs_interfaceid": "fa738fa1-0be4-4506-8e42-73671661dee1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.524918] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a71f56fb-68da-4432-a59e-3e149dd849d3 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Lock "011ab7de-98a7-41fc-9e05-e71965c73c09" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.488s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 740.656947] env[69367]: DEBUG nova.network.neutron [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: a0b99237-8f23-40ec-827f-af75961a096d] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.662097] env[69367]: ERROR nova.scheduler.client.report [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [req-197fdfd3-1285-4f7a-a5a2-854dc64a12d5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-197fdfd3-1285-4f7a-a5a2-854dc64a12d5"}]} [ 740.662872] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.685s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 740.663156] env[69367]: ERROR nova.compute.manager [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 740.663156] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Traceback (most recent call last): [ 740.663156] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 740.663156] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] yield [ 740.663156] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 740.663156] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] self.set_inventory_for_provider( [ 740.663156] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 740.663156] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 740.663519] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-197fdfd3-1285-4f7a-a5a2-854dc64a12d5"}]} [ 740.663519] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] [ 740.663519] env[69367]: ERROR nova.compute.manager [instance: 
10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] During handling of the above exception, another exception occurred: [ 740.663519] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] [ 740.663519] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Traceback (most recent call last): [ 740.663519] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 740.663519] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] self._delete_instance(context, instance, bdms) [ 740.663519] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 740.663519] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] self._complete_deletion(context, instance) [ 740.663922] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 740.663922] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] self._update_resource_tracker(context, instance) [ 740.663922] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 740.663922] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] self.rt.update_usage(context, instance, instance.node) [ 740.663922] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 740.663922] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] return f(*args, **kwargs) [ 740.663922] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 740.663922] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] self._update(context.elevated(), self.compute_nodes[nodename]) [ 740.663922] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 740.663922] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] self._update_to_placement(context, compute_node, startup) [ 740.663922] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 740.663922] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 740.664241] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 740.664241] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] return attempt.get(self._wrap_exception) [ 740.664241] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 740.664241] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] six.reraise(self.value[0], self.value[1], self.value[2]) [ 740.664241] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 740.664241] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] raise value [ 740.664241] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 740.664241] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 740.664241] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 740.664241] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] self.reportclient.update_from_provider_tree( [ 740.664241] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 740.664241] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] with catch_all(pd.uuid): [ 740.664241] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 740.664565] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] self.gen.throw(typ, value, traceback) [ 740.664565] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 740.664565] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] raise exception.ResourceProviderSyncFailed() [ 740.664565] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 740.664565] env[69367]: ERROR nova.compute.manager [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] [ 740.669725] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.379s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 740.671420] env[69367]: INFO nova.compute.claims [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 740.711023] env[69367]: DEBUG nova.network.neutron [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 740.779529] env[69367]: DEBUG oslo_vmware.api [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]527046bf-1e08-e684-1662-bc2b70ac4696, 'name': SearchDatastore_Task, 'duration_secs': 0.009948} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.779746] env[69367]: DEBUG oslo_concurrency.lockutils [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 740.780093] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] f8c07fa1-d27c-4d0f-847b-481477cd04bf/f8c07fa1-d27c-4d0f-847b-481477cd04bf.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 740.780603] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-29610fa1-47d9-4982-831e-ee38e286ed7c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.789633] env[69367]: DEBUG oslo_vmware.api [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Waiting for the task: (returnval){ [ 740.789633] env[69367]: value = "task-4233927" [ 740.789633] env[69367]: _type = "Task" [ 740.789633] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.799441] env[69367]: DEBUG oslo_vmware.api [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4233927, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.851089] env[69367]: DEBUG nova.compute.manager [req-5ecd260b-8072-4970-8e9d-89d0300c366c req-96550871-ea22-4bee-b9e7-a497a0b0c245 service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Received event network-vif-plugged-00ad3cfd-f282-442d-b152-85e841dd8a16 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 740.851318] env[69367]: DEBUG oslo_concurrency.lockutils [req-5ecd260b-8072-4970-8e9d-89d0300c366c req-96550871-ea22-4bee-b9e7-a497a0b0c245 service nova] Acquiring lock "837b4093-308b-440b-940d-fc0227a5c590-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 740.851519] env[69367]: DEBUG oslo_concurrency.lockutils [req-5ecd260b-8072-4970-8e9d-89d0300c366c req-96550871-ea22-4bee-b9e7-a497a0b0c245 service nova] Lock "837b4093-308b-440b-940d-fc0227a5c590-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 740.851690] env[69367]: DEBUG oslo_concurrency.lockutils [req-5ecd260b-8072-4970-8e9d-89d0300c366c req-96550871-ea22-4bee-b9e7-a497a0b0c245 service nova] Lock "837b4093-308b-440b-940d-fc0227a5c590-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 740.851854] env[69367]: DEBUG nova.compute.manager [req-5ecd260b-8072-4970-8e9d-89d0300c366c req-96550871-ea22-4bee-b9e7-a497a0b0c245 service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] No waiting events found dispatching network-vif-plugged-00ad3cfd-f282-442d-b152-85e841dd8a16 {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 740.853909] env[69367]: WARNING nova.compute.manager [req-5ecd260b-8072-4970-8e9d-89d0300c366c req-96550871-ea22-4bee-b9e7-a497a0b0c245 service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Received unexpected event network-vif-plugged-00ad3cfd-f282-442d-b152-85e841dd8a16 for instance with vm_state building and task_state spawning. [ 740.854215] env[69367]: DEBUG nova.compute.manager [req-5ecd260b-8072-4970-8e9d-89d0300c366c req-96550871-ea22-4bee-b9e7-a497a0b0c245 service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Received event network-changed-00ad3cfd-f282-442d-b152-85e841dd8a16 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 740.854385] env[69367]: DEBUG nova.compute.manager [req-5ecd260b-8072-4970-8e9d-89d0300c366c req-96550871-ea22-4bee-b9e7-a497a0b0c245 service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Refreshing instance network info cache due to event network-changed-00ad3cfd-f282-442d-b152-85e841dd8a16. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 740.854585] env[69367]: DEBUG oslo_concurrency.lockutils [req-5ecd260b-8072-4970-8e9d-89d0300c366c req-96550871-ea22-4bee-b9e7-a497a0b0c245 service nova] Acquiring lock "refresh_cache-837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.880247] env[69367]: DEBUG nova.network.neutron [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Updating instance_info_cache with network_info: [{"id": "00ad3cfd-f282-442d-b152-85e841dd8a16", "address": "fa:16:3e:17:02:f1", "network": {"id": "a8e5c1a6-2526-4042-8a8d-fdb54dbe7bf9", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1429177141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f5bc3d470905412ea72a8eedb98e9e47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00ad3cfd-f2", "ovs_interfaceid": "00ad3cfd-f282-442d-b152-85e841dd8a16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.953207] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Releasing lock "refresh_cache-8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 741.030054] env[69367]: DEBUG nova.compute.manager [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 741.160785] env[69367]: INFO nova.compute.manager [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: a0b99237-8f23-40ec-827f-af75961a096d] Took 1.04 seconds to deallocate network for instance. 
[ 741.180056] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Lock "10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.883s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 741.300531] env[69367]: DEBUG oslo_vmware.api [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4233927, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.383678] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Releasing lock "refresh_cache-837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 741.384136] env[69367]: DEBUG nova.compute.manager [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Instance network_info: |[{"id": "00ad3cfd-f282-442d-b152-85e841dd8a16", "address": "fa:16:3e:17:02:f1", "network": {"id": "a8e5c1a6-2526-4042-8a8d-fdb54dbe7bf9", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1429177141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f5bc3d470905412ea72a8eedb98e9e47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00ad3cfd-f2", "ovs_interfaceid": "00ad3cfd-f282-442d-b152-85e841dd8a16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 741.384554] env[69367]: DEBUG oslo_concurrency.lockutils [req-5ecd260b-8072-4970-8e9d-89d0300c366c req-96550871-ea22-4bee-b9e7-a497a0b0c245 service nova] Acquired lock "refresh_cache-837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 741.384740] env[69367]: DEBUG nova.network.neutron [req-5ecd260b-8072-4970-8e9d-89d0300c366c req-96550871-ea22-4bee-b9e7-a497a0b0c245 service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Refreshing network info cache for port 00ad3cfd-f282-442d-b152-85e841dd8a16 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 741.386010] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-fa32840b-b5e6-49ce-89e8-6324af525819 
tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:02:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e6f11c0d-c73a-47f5-b02e-47bff48da0e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '00ad3cfd-f282-442d-b152-85e841dd8a16', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 741.396262] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Creating folder: Project (f5bc3d470905412ea72a8eedb98e9e47). Parent ref: group-v837645. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 741.396964] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-52acab55-87ad-4697-9031-28a32ca14ccd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.410229] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Created folder: Project (f5bc3d470905412ea72a8eedb98e9e47) in parent group-v837645. [ 741.410475] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Creating folder: Instances. Parent ref: group-v837715. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 741.410784] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e680101d-323a-4d57-8afb-40df4860065d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.428999] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Created folder: Instances in parent group-v837715. [ 741.429385] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 741.429622] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 741.429869] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e038c32-07e0-47b9-bdc1-82fbb8c9e274 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.448686] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 741.450764] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dae7daf-8080-4f61-960b-896ce6b89c3f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.458959] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 741.460249] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-294253cb-b591-4bd7-850c-75ff095868f6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.462021] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 741.462021] env[69367]: value = "task-4233930" [ 741.462021] env[69367]: _type = "Task" [ 741.462021] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.471508] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233930, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.539178] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 741.539527] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 741.539618] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Deleting the datastore file [datastore2] 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 741.542818] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7555828c-4c04-4457-a27b-5525d0495ba7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.555492] env[69367]: DEBUG oslo_vmware.api [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 741.555492] env[69367]: value = "task-4233932" [ 741.555492] env[69367]: _type = "Task" [ 741.555492] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.566074] env[69367]: DEBUG oslo_vmware.api [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233932, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.567419] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.607051] env[69367]: INFO nova.compute.manager [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Rebuilding instance [ 741.653591] env[69367]: DEBUG nova.compute.manager [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 741.654599] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-394385b0-4053-42bc-8459-d2a393f2b7eb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.710282] env[69367]: DEBUG nova.scheduler.client.report [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 741.725702] env[69367]: DEBUG nova.scheduler.client.report [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 741.725946] env[69367]: DEBUG nova.compute.provider_tree [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 741.738820] env[69367]: DEBUG nova.scheduler.client.report [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] Refreshing aggregate associations for 
resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 741.760319] env[69367]: DEBUG nova.scheduler.client.report [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 741.805735] env[69367]: DEBUG oslo_vmware.api [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4233927, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514072} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.806107] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] f8c07fa1-d27c-4d0f-847b-481477cd04bf/f8c07fa1-d27c-4d0f-847b-481477cd04bf.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 741.806429] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 741.806776] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-70595c00-7921-44bf-8c56-b3b613877944 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.815407] env[69367]: DEBUG oslo_vmware.api [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Waiting for the task: (returnval){ [ 741.815407] env[69367]: value = "task-4233933" [ 741.815407] env[69367]: _type = "Task" [ 741.815407] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.828853] env[69367]: DEBUG oslo_vmware.api [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4233933, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.977235] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233930, 'name': CreateVM_Task, 'duration_secs': 0.373839} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.977424] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 741.978123] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.978296] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 741.978624] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 741.978872] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-754e4487-72d7-41ad-8007-5e41d882917a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.984238] env[69367]: DEBUG oslo_vmware.api [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Waiting for the task: (returnval){ [ 741.984238] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52095978-c79e-f006-f5fd-75f40a9a5c7e" [ 741.984238] env[69367]: _type = "Task" [ 741.984238] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.994173] env[69367]: DEBUG oslo_vmware.api [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52095978-c79e-f006-f5fd-75f40a9a5c7e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.066552] env[69367]: DEBUG oslo_vmware.api [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4233932, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147505} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.068812] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 742.068996] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 742.069184] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 742.092397] env[69367]: INFO nova.scheduler.client.report [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Deleted allocations for instance 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa [ 742.192675] env[69367]: INFO nova.scheduler.client.report [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Deleted allocations for instance a0b99237-8f23-40ec-827f-af75961a096d [ 742.316205] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ff7f48-64c1-4e34-8741-760dd56f75dd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.333721] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fb74af0-6a02-4b1d-8b28-90719e34cd43 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.337141] env[69367]: DEBUG oslo_vmware.api [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4233933, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075334} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.337412] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 742.338604] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1fd15ad-0a5d-4e9b-99c8-616edec917a6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.367040] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10925f97-d83f-4665-997f-df0acf64c1f6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.389260] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] f8c07fa1-d27c-4d0f-847b-481477cd04bf/f8c07fa1-d27c-4d0f-847b-481477cd04bf.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 742.393026] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23a5b4fd-f68f-410a-a22c-3e3335651b3d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.415024] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e261732-3692-46ce-a4cc-c95331272a4a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.417633] env[69367]: DEBUG oslo_vmware.api [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Waiting for the task: (returnval){ [ 742.417633] env[69367]: value = "task-4233934" [ 742.417633] env[69367]: _type = "Task" [ 742.417633] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.428866] env[69367]: DEBUG nova.compute.provider_tree [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 742.436141] env[69367]: DEBUG oslo_vmware.api [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4233934, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.499112] env[69367]: DEBUG oslo_vmware.api [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52095978-c79e-f006-f5fd-75f40a9a5c7e, 'name': SearchDatastore_Task, 'duration_secs': 0.015212} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.499112] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 742.499112] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 742.499112] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.499275] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 742.499275] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 742.499275] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-748c9eae-6630-4747-b325-33d742cf22b7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.507533] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 742.507719] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 742.508473] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-968f524c-b53a-48fa-9548-64033489d9e8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.515440] env[69367]: DEBUG oslo_vmware.api [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Waiting for the task: (returnval){ [ 742.515440] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52f83be3-c8de-db6c-1d0d-0d45a7fe4ca6" [ 742.515440] env[69367]: _type = "Task" [ 742.515440] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.523688] env[69367]: DEBUG oslo_vmware.api [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52f83be3-c8de-db6c-1d0d-0d45a7fe4ca6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.571446] env[69367]: DEBUG nova.network.neutron [req-5ecd260b-8072-4970-8e9d-89d0300c366c req-96550871-ea22-4bee-b9e7-a497a0b0c245 service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Updated VIF entry in instance network info cache for port 00ad3cfd-f282-442d-b152-85e841dd8a16. {{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 742.571814] env[69367]: DEBUG nova.network.neutron [req-5ecd260b-8072-4970-8e9d-89d0300c366c req-96550871-ea22-4bee-b9e7-a497a0b0c245 service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Updating instance_info_cache with network_info: [{"id": "00ad3cfd-f282-442d-b152-85e841dd8a16", "address": "fa:16:3e:17:02:f1", "network": {"id": "a8e5c1a6-2526-4042-8a8d-fdb54dbe7bf9", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1429177141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f5bc3d470905412ea72a8eedb98e9e47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00ad3cfd-f2", "ovs_interfaceid": "00ad3cfd-f282-442d-b152-85e841dd8a16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.597153] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.669737] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 742.670466] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82a46e6e-0926-464a-94af-9f2a5f70919c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.678293] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Waiting for the task: (returnval){ [ 742.678293] env[69367]: value = "task-4233935" [ 742.678293] env[69367]: _type = "Task" [ 742.678293] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.687600] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233935, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.704058] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.704683] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8cddd887-a3bc-4c75-aa6b-511f6208e23b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Lock "a0b99237-8f23-40ec-827f-af75961a096d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.314s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.935936] env[69367]: DEBUG oslo_vmware.api [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4233934, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.958019] env[69367]: ERROR nova.scheduler.client.report [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] [req-f7dbca4a-3dbc-4def-a591-e8652b7916f2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-f7dbca4a-3dbc-4def-a591-e8652b7916f2"}]} [ 742.958019] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.287s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.958304] env[69367]: ERROR nova.compute.manager [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 742.958304] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] Traceback (most recent call last): [ 742.958304] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 742.958304] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] yield [ 742.958304] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 742.958304] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] self.set_inventory_for_provider( [ 742.958304] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 742.958304] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 742.958531] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-f7dbca4a-3dbc-4def-a591-e8652b7916f2"}]} [ 742.958531] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] [ 742.958531] env[69367]: ERROR nova.compute.manager 
[instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] During handling of the above exception, another exception occurred: [ 742.958531] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] [ 742.958531] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] Traceback (most recent call last): [ 742.958531] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 742.958531] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] with self.rt.instance_claim(context, instance, node, allocs, [ 742.958531] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 742.958531] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] return f(*args, **kwargs) [ 742.958792] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 742.958792] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] self._update(elevated, cn) [ 742.958792] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 742.958792] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] self._update_to_placement(context, compute_node, startup) [ 742.958792] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 742.958792] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 742.958792] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 742.958792] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] return attempt.get(self._wrap_exception) [ 742.958792] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 742.958792] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] six.reraise(self.value[0], self.value[1], self.value[2]) [ 742.958792] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 742.958792] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] raise value [ 742.958792] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 742.959178] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 742.959178] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 
1390, in _update_to_placement [ 742.959178] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] self.reportclient.update_from_provider_tree( [ 742.959178] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 742.959178] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] with catch_all(pd.uuid): [ 742.959178] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 742.959178] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] self.gen.throw(typ, value, traceback) [ 742.959178] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 742.959178] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] raise exception.ResourceProviderSyncFailed() [ 742.959178] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 742.959178] env[69367]: ERROR nova.compute.manager [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] [ 742.959536] env[69367]: DEBUG nova.compute.utils [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 742.965820] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.511s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.965820] env[69367]: DEBUG oslo_concurrency.lockutils [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.965820] env[69367]: INFO nova.compute.manager [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] [instance: 92c27615-d377-492f-a9db-ff45b2e71537] Successfully reverted task state from None on failure for instance. 
[ 742.968151] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 23.026s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.971179] env[69367]: DEBUG nova.compute.manager [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] Build of instance e6a9b69c-e00d-4260-84c7-2d98ce80ead0 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 742.974323] env[69367]: DEBUG nova.compute.manager [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 742.974323] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] Acquiring lock "refresh_cache-e6a9b69c-e00d-4260-84c7-2d98ce80ead0" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.974323] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] Acquired lock "refresh_cache-e6a9b69c-e00d-4260-84c7-2d98ce80ead0" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 742.974323] env[69367]: DEBUG nova.network.neutron [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 742.976442] env[69367]: ERROR oslo_messaging.rpc.server [None req-be828dd9-f06a-4267-b660-1a75113835c1 tempest-ServersAdminTestJSON-963435758 tempest-ServersAdminTestJSON-963435758-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 742.976442] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 742.976442] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 742.976442] env[69367]: ERROR oslo_messaging.rpc.server yield [ 742.976442] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 742.976442] env[69367]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 742.976442] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 742.976442] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 742.976442] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-69f917c4-a53d-4900-ab81-32e496e7ed75"}]} [ 742.976442] env[69367]: ERROR oslo_messaging.rpc.server [ 742.976868] env[69367]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 742.976868] env[69367]: ERROR oslo_messaging.rpc.server [ 742.976868] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 742.976868] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 742.976868] env[69367]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 742.976868] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 742.976868] env[69367]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 742.976868] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 742.976868] env[69367]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 742.976868] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 742.976868] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 742.976868] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 742.976868] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 742.976868] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 742.976868] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 742.976868] env[69367]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 742.976868] env[69367]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 742.976868] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 742.977507] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 742.977507] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 742.977507] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 742.977507] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 742.977507] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 742.977507] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 742.977507] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 742.977507] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 742.977507] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 742.977507] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 742.977507] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 742.977507] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 742.977507] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 742.977507] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 742.977507] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 742.977507] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 742.977507] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 742.977507] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 742.978298] env[69367]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 742.978298] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 742.978298] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 742.978298] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 742.978298] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 742.978298] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 742.978298] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 742.978298] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 742.978298] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 
742.978298] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 742.978298] env[69367]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 742.978298] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 742.978298] env[69367]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 742.978298] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 742.978298] env[69367]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 742.978298] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 742.978298] env[69367]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 742.978298] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 742.980887] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 742.980887] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 742.980887] env[69367]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 742.980887] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 742.980887] env[69367]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 742.980887] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 742.980887] env[69367]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 742.980887] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 742.980887] env[69367]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 742.980887] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 742.980887] env[69367]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 742.980887] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 742.980887] env[69367]: ERROR oslo_messaging.rpc.server raise value [ 742.980887] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 742.980887] env[69367]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 742.980887] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 742.980887] env[69367]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 742.981305] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 742.981305] env[69367]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 742.981305] env[69367]: ERROR oslo_messaging.rpc.server File 
"/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 742.981305] env[69367]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 742.981305] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 742.981305] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 742.981305] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 742.981305] env[69367]: ERROR oslo_messaging.rpc.server [ 742.981305] env[69367]: DEBUG nova.compute.manager [req-9315a4f2-6c5c-4f50-b14b-7c96402c0a6a req-44b11593-738e-476a-b687-a711dae69502 service nova] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Received event network-vif-unplugged-fa738fa1-0be4-4506-8e42-73671661dee1 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 742.981305] env[69367]: DEBUG oslo_concurrency.lockutils [req-9315a4f2-6c5c-4f50-b14b-7c96402c0a6a req-44b11593-738e-476a-b687-a711dae69502 service nova] Acquiring lock "8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.982384] env[69367]: DEBUG oslo_concurrency.lockutils [req-9315a4f2-6c5c-4f50-b14b-7c96402c0a6a req-44b11593-738e-476a-b687-a711dae69502 service nova] Lock "8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.982384] env[69367]: DEBUG oslo_concurrency.lockutils [req-9315a4f2-6c5c-4f50-b14b-7c96402c0a6a req-44b11593-738e-476a-b687-a711dae69502 service nova] Lock "8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.982384] env[69367]: DEBUG nova.compute.manager [req-9315a4f2-6c5c-4f50-b14b-7c96402c0a6a req-44b11593-738e-476a-b687-a711dae69502 service nova] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] No waiting events found dispatching network-vif-unplugged-fa738fa1-0be4-4506-8e42-73671661dee1 {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 742.982384] env[69367]: WARNING nova.compute.manager [req-9315a4f2-6c5c-4f50-b14b-7c96402c0a6a req-44b11593-738e-476a-b687-a711dae69502 service nova] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Received unexpected event network-vif-unplugged-fa738fa1-0be4-4506-8e42-73671661dee1 for instance with vm_state shelved_offloaded and task_state unshelving. 
[ 742.982384] env[69367]: DEBUG nova.compute.manager [req-9315a4f2-6c5c-4f50-b14b-7c96402c0a6a req-44b11593-738e-476a-b687-a711dae69502 service nova] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Received event network-changed-fa738fa1-0be4-4506-8e42-73671661dee1 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 742.982617] env[69367]: DEBUG nova.compute.manager [req-9315a4f2-6c5c-4f50-b14b-7c96402c0a6a req-44b11593-738e-476a-b687-a711dae69502 service nova] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Refreshing instance network info cache due to event network-changed-fa738fa1-0be4-4506-8e42-73671661dee1. {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 742.982617] env[69367]: DEBUG oslo_concurrency.lockutils [req-9315a4f2-6c5c-4f50-b14b-7c96402c0a6a req-44b11593-738e-476a-b687-a711dae69502 service nova] Acquiring lock "refresh_cache-8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.982617] env[69367]: DEBUG oslo_concurrency.lockutils [req-9315a4f2-6c5c-4f50-b14b-7c96402c0a6a req-44b11593-738e-476a-b687-a711dae69502 service nova] Acquired lock "refresh_cache-8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 742.982617] env[69367]: DEBUG nova.network.neutron [req-9315a4f2-6c5c-4f50-b14b-7c96402c0a6a req-44b11593-738e-476a-b687-a711dae69502 service nova] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Refreshing network info cache for port fa738fa1-0be4-4506-8e42-73671661dee1 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 743.028895] env[69367]: DEBUG oslo_vmware.api [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52f83be3-c8de-db6c-1d0d-0d45a7fe4ca6, 'name': SearchDatastore_Task, 'duration_secs': 0.009315} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.033531] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bcb24962-13b2-40c9-88c3-0595189f7ce2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.039024] env[69367]: DEBUG oslo_vmware.api [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Waiting for the task: (returnval){ [ 743.039024] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52c201b3-e360-1068-ac34-7bee5680d032" [ 743.039024] env[69367]: _type = "Task" [ 743.039024] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.055497] env[69367]: DEBUG oslo_vmware.api [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52c201b3-e360-1068-ac34-7bee5680d032, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.075535] env[69367]: DEBUG oslo_concurrency.lockutils [req-5ecd260b-8072-4970-8e9d-89d0300c366c req-96550871-ea22-4bee-b9e7-a497a0b0c245 service nova] Releasing lock "refresh_cache-837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 743.192088] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233935, 'name': PowerOffVM_Task, 'duration_secs': 0.190124} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.192752] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 743.193567] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 743.194398] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-007d729d-b830-4932-acf7-a753d2d12440 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.202213] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 743.202654] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4d203e59-00f6-4ca8-b755-42c870211704 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.211323] env[69367]: DEBUG nova.compute.manager [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 743.229493] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 743.229821] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 743.230251] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Deleting the datastore file [datastore2] 011ab7de-98a7-41fc-9e05-e71965c73c09 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 743.231256] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b6af0acb-ae5d-4c73-8906-c2927ba056a9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.238268] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Waiting for the task: (returnval){ [ 743.238268] env[69367]: value = "task-4233937" [ 743.238268] env[69367]: _type = "Task" [ 743.238268] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.247775] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233937, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.432049] env[69367]: DEBUG oslo_vmware.api [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4233934, 'name': ReconfigVM_Task, 'duration_secs': 0.65313} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.432435] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Reconfigured VM instance instance-0000002b to attach disk [datastore2] f8c07fa1-d27c-4d0f-847b-481477cd04bf/f8c07fa1-d27c-4d0f-847b-481477cd04bf.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 743.433335] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dd46a766-e10a-43c4-a28a-b1b2617b0b5a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.444095] env[69367]: DEBUG oslo_vmware.api [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Waiting for the task: (returnval){ [ 743.444095] env[69367]: value = "task-4233938" [ 743.444095] env[69367]: _type = "Task" [ 743.444095] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.453431] env[69367]: DEBUG oslo_vmware.api [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4233938, 'name': Rename_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.521199] env[69367]: DEBUG nova.network.neutron [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 743.560200] env[69367]: DEBUG oslo_vmware.api [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52c201b3-e360-1068-ac34-7bee5680d032, 'name': SearchDatastore_Task, 'duration_secs': 0.011086} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.560200] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 743.560330] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] 837b4093-308b-440b-940d-fc0227a5c590/837b4093-308b-440b-940d-fc0227a5c590.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 743.560661] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-98cbabee-5487-45ec-aba8-8929c5b5199e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.571701] env[69367]: DEBUG oslo_vmware.api [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Waiting for the task: (returnval){ [ 743.571701] env[69367]: value = "task-4233939" [ 743.571701] env[69367]: _type = "Task" [ 743.571701] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.582182] env[69367]: DEBUG oslo_vmware.api [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4233939, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.673821] env[69367]: DEBUG nova.network.neutron [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.736712] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.749669] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233937, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.088971} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.749946] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 743.750189] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 743.752107] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 743.893934] env[69367]: DEBUG nova.network.neutron [req-9315a4f2-6c5c-4f50-b14b-7c96402c0a6a req-44b11593-738e-476a-b687-a711dae69502 service nova] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Updated VIF entry in instance network info cache for port fa738fa1-0be4-4506-8e42-73671661dee1. {{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 743.894409] env[69367]: DEBUG nova.network.neutron [req-9315a4f2-6c5c-4f50-b14b-7c96402c0a6a req-44b11593-738e-476a-b687-a711dae69502 service nova] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Updating instance_info_cache with network_info: [{"id": "fa738fa1-0be4-4506-8e42-73671661dee1", "address": "fa:16:3e:c7:7c:7c", "network": {"id": "0b0a82c2-1a70-4174-a75e-5863d3505b2c", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1091682254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.159", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26a89ab4163e4b9a801dcbf11c953cf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapfa738fa1-0b", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.959367] env[69367]: DEBUG oslo_vmware.api [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4233938, 'name': Rename_Task, 'duration_secs': 0.140815} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.959695] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 743.960046] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-79989104-b87f-4b65-8e26-09611a2fcb0e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.968016] env[69367]: DEBUG oslo_vmware.api [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Waiting for the task: (returnval){ [ 743.968016] env[69367]: value = "task-4233940" [ 743.968016] env[69367]: _type = "Task" [ 743.968016] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.983311] env[69367]: DEBUG oslo_vmware.api [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4233940, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.084931] env[69367]: DEBUG oslo_vmware.api [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4233939, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.177139] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] Releasing lock "refresh_cache-e6a9b69c-e00d-4260-84c7-2d98ce80ead0" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 744.177139] env[69367]: DEBUG nova.compute.manager [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 744.177139] env[69367]: DEBUG nova.compute.manager [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 744.177328] env[69367]: DEBUG nova.network.neutron [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 744.195101] env[69367]: DEBUG nova.network.neutron [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 744.321030] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a1156e1e-41ad-40b1-a520-164694ed7948 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquiring lock "8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 744.400262] env[69367]: DEBUG oslo_concurrency.lockutils [req-9315a4f2-6c5c-4f50-b14b-7c96402c0a6a req-44b11593-738e-476a-b687-a711dae69502 service nova] Releasing lock "refresh_cache-8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 744.480608] env[69367]: DEBUG oslo_vmware.api [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4233940, 'name': PowerOnVM_Task, 'duration_secs': 0.491651} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.480985] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 744.481213] env[69367]: INFO nova.compute.manager [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Took 8.29 seconds to spawn the instance on the hypervisor. 
[ 744.481399] env[69367]: DEBUG nova.compute.manager [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 744.482313] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b81352-cc04-4b09-b2df-96a49ed6578e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.584008] env[69367]: DEBUG oslo_vmware.api [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4233939, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.515117} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.584298] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] 837b4093-308b-440b-940d-fc0227a5c590/837b4093-308b-440b-940d-fc0227a5c590.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 744.584505] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 744.584762] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9f1d007d-ec3e-4bda-816d-3cca37fac78f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.592853] env[69367]: DEBUG oslo_vmware.api [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Waiting for the task: (returnval){ [ 744.592853] env[69367]: value = "task-4233941" [ 744.592853] env[69367]: _type = "Task" [ 744.592853] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.603122] env[69367]: DEBUG oslo_vmware.api [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4233941, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.697925] env[69367]: DEBUG nova.network.neutron [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.788961] env[69367]: DEBUG nova.virt.hardware [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 744.789252] env[69367]: DEBUG nova.virt.hardware [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 744.789413] env[69367]: DEBUG nova.virt.hardware [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 744.789599] env[69367]: DEBUG nova.virt.hardware [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 744.789746] env[69367]: DEBUG nova.virt.hardware [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 744.789939] env[69367]: DEBUG nova.virt.hardware [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 744.790212] env[69367]: DEBUG nova.virt.hardware [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 744.790384] env[69367]: DEBUG nova.virt.hardware [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 744.790558] env[69367]: DEBUG nova.virt.hardware [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 744.790724] env[69367]: DEBUG nova.virt.hardware [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 744.790901] env[69367]: DEBUG nova.virt.hardware [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 744.791819] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad0b67f5-2b84-441b-9b20-cf68bc5627f1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.801400] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e28dd6db-b1b7-4c9b-b947-e5bda53a99eb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.816300] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Instance VIF info [] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 744.822643] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 744.823061] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 744.824187] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4f81dbc8-8dd7-42be-afc2-d918eec1d81e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.842248] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 744.842248] env[69367]: value = "task-4233942" [ 744.842248] env[69367]: _type = "Task" [ 744.842248] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.852863] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233942, 'name': CreateVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.005839] env[69367]: INFO nova.compute.manager [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Took 32.81 seconds to build instance. [ 745.106974] env[69367]: DEBUG oslo_vmware.api [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4233941, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.232971} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.106974] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 745.106974] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1251f36c-057f-4acf-9d62-edc98059da74 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.134253] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] 837b4093-308b-440b-940d-fc0227a5c590/837b4093-308b-440b-940d-fc0227a5c590.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 745.136282] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c34ad7b-70b1-41d9-b473-a98f08a341c1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.165635] env[69367]: DEBUG oslo_vmware.api [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Waiting for the task: (returnval){ [ 745.165635] env[69367]: value = "task-4233943" [ 745.165635] env[69367]: _type = "Task" [ 745.165635] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.177390] env[69367]: DEBUG oslo_vmware.api [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4233943, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.203847] env[69367]: INFO nova.compute.manager [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] [instance: e6a9b69c-e00d-4260-84c7-2d98ce80ead0] Took 1.02 seconds to deallocate network for instance. [ 745.357025] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233942, 'name': CreateVM_Task, 'duration_secs': 0.486577} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.357025] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 745.357025] env[69367]: DEBUG oslo_concurrency.lockutils [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.357025] env[69367]: DEBUG oslo_concurrency.lockutils [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 745.357025] env[69367]: DEBUG oslo_concurrency.lockutils [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 745.357025] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b997c47-3f5a-49b2-81f0-43f237335e84 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.364185] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Waiting for the task: (returnval){ [ 745.364185] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]5258b3a1-63bd-a053-9e79-e0ebc49d408b" [ 745.364185] env[69367]: _type = "Task" [ 745.364185] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.373875] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5258b3a1-63bd-a053-9e79-e0ebc49d408b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.508572] env[69367]: DEBUG oslo_concurrency.lockutils [None req-877216f0-a699-42aa-99bb-4a741dc9bd1b tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Lock "f8c07fa1-d27c-4d0f-847b-481477cd04bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.733s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 745.613924] env[69367]: DEBUG oslo_concurrency.lockutils [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Acquiring lock "dd598b7a-057f-48ea-a31e-96e7ccadeb3d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.614327] env[69367]: DEBUG oslo_concurrency.lockutils [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Lock "dd598b7a-057f-48ea-a31e-96e7ccadeb3d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.658807] env[69367]: INFO nova.compute.manager [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Rescuing [ 745.659063] env[69367]: DEBUG oslo_concurrency.lockutils [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Acquiring lock "refresh_cache-f8c07fa1-d27c-4d0f-847b-481477cd04bf" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.659221] env[69367]: DEBUG oslo_concurrency.lockutils [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Acquired lock "refresh_cache-f8c07fa1-d27c-4d0f-847b-481477cd04bf" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 745.659389] env[69367]: DEBUG nova.network.neutron [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 745.678979] env[69367]: DEBUG oslo_vmware.api [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4233943, 'name': ReconfigVM_Task, 'duration_secs': 0.312352} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.679317] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Reconfigured VM instance instance-0000002c to attach disk [datastore1] 837b4093-308b-440b-940d-fc0227a5c590/837b4093-308b-440b-940d-fc0227a5c590.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 745.680059] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dbd648b1-dddc-40fe-8d5a-39ea83042c4b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.687600] env[69367]: DEBUG oslo_vmware.api [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Waiting for the task: (returnval){ [ 745.687600] env[69367]: value = "task-4233944" [ 745.687600] env[69367]: _type = "Task" [ 745.687600] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.696879] env[69367]: DEBUG oslo_vmware.api [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4233944, 'name': Rename_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.878096] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5258b3a1-63bd-a053-9e79-e0ebc49d408b, 'name': SearchDatastore_Task, 'duration_secs': 0.011974} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.878884] env[69367]: DEBUG oslo_concurrency.lockutils [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 745.879169] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 745.879454] env[69367]: DEBUG oslo_concurrency.lockutils [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.879702] env[69367]: DEBUG oslo_concurrency.lockutils [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 745.879943] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 745.880255] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f70c7f0-880e-46de-96be-3841f515cd4c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.891838] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 745.891838] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 745.891838] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9dae741-53d7-4b6b-812e-61c27ef688f8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.904399] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Waiting for the task: (returnval){ [ 745.904399] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52bca2fa-2ae0-4759-2f99-20bc0ab3b3cf" [ 745.904399] env[69367]: _type = "Task" [ 745.904399] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.917040] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52bca2fa-2ae0-4759-2f99-20bc0ab3b3cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.014692] env[69367]: DEBUG nova.compute.manager [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 746.198547] env[69367]: DEBUG oslo_vmware.api [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4233944, 'name': Rename_Task, 'duration_secs': 0.191149} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.198828] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 746.199092] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3f22ba9b-da6b-4df7-96dc-1dc3c38f49fb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.206946] env[69367]: DEBUG oslo_vmware.api [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Waiting for the task: (returnval){ [ 746.206946] env[69367]: value = "task-4233945" [ 746.206946] env[69367]: _type = "Task" [ 746.206946] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.220779] env[69367]: DEBUG oslo_vmware.api [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4233945, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.237929] env[69367]: INFO nova.scheduler.client.report [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] Deleted allocations for instance e6a9b69c-e00d-4260-84c7-2d98ce80ead0 [ 746.417251] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52bca2fa-2ae0-4759-2f99-20bc0ab3b3cf, 'name': SearchDatastore_Task, 'duration_secs': 0.012931} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.418428] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17002796-2b7d-407d-acae-24f0e6186a1c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.426735] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Waiting for the task: (returnval){ [ 746.426735] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]529bd568-381e-f62b-89fc-a06ccf725db0" [ 746.426735] env[69367]: _type = "Task" [ 746.426735] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.435718] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]529bd568-381e-f62b-89fc-a06ccf725db0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.537441] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.564023] env[69367]: DEBUG nova.network.neutron [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Updating instance_info_cache with network_info: [{"id": "b4b0bb3d-a7c4-42e8-99f2-9078bf5e2b6c", "address": "fa:16:3e:6c:36:d9", "network": {"id": "32a4b064-9027-4a4e-9230-c450c44581e8", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1766727980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "82caa54483a54af1870eab2fb0d6ca2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88651df2-0506-4f6c-b868-dd30a81f2b1c", "external-id": "nsx-vlan-transportzone-366", "segmentation_id": 366, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4b0bb3d-a7", "ovs_interfaceid": "b4b0bb3d-a7c4-42e8-99f2-9078bf5e2b6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.719786] env[69367]: DEBUG oslo_vmware.api [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4233945, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.749743] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d12701b-472c-4f21-b3e9-0df43501af60 tempest-ServerActionsV293TestJSON-1803979304 tempest-ServerActionsV293TestJSON-1803979304-project-member] Lock "e6a9b69c-e00d-4260-84c7-2d98ce80ead0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.179s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 746.951419] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]529bd568-381e-f62b-89fc-a06ccf725db0, 'name': SearchDatastore_Task, 'duration_secs': 0.018345} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.951717] env[69367]: DEBUG oslo_concurrency.lockutils [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 746.952280] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 011ab7de-98a7-41fc-9e05-e71965c73c09/011ab7de-98a7-41fc-9e05-e71965c73c09.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 746.952499] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-74b92868-230d-44c3-a0ed-12617debd3de {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.959756] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Waiting for the task: (returnval){ [ 746.959756] env[69367]: value = "task-4233946" [ 746.959756] env[69367]: _type = "Task" [ 746.959756] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.969103] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233946, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.073054] env[69367]: DEBUG oslo_concurrency.lockutils [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Releasing lock "refresh_cache-f8c07fa1-d27c-4d0f-847b-481477cd04bf" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 747.224282] env[69367]: DEBUG oslo_vmware.api [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4233945, 'name': PowerOnVM_Task, 'duration_secs': 0.540454} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.224739] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 747.224848] env[69367]: INFO nova.compute.manager [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Took 8.43 seconds to spawn the instance on the hypervisor. [ 747.225119] env[69367]: DEBUG nova.compute.manager [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 747.226226] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d92eade-3b39-462b-894f-13601fc58e68 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.253241] env[69367]: DEBUG nova.compute.manager [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 747.476406] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233946, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.568803] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance a358ce6d-9826-4ddb-8c2f-51bac8db59d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 747.569222] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 4e346ed1-36e9-421d-975f-e8bb6f05c0a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 747.569593] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance e1c7d100-4ad7-4871-970f-bb7562bfc6fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 747.575087] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 747.575087] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance ab365570-ac29-4094-be4c-d49563a465c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 747.575087] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance c17525ee-d038-4c81-932b-ed74a6de6cb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 747.575087] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 92c27615-d377-492f-a9db-ff45b2e71537 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 747.575259] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance d2f8328d-fd05-4e63-9cbd-a6e3ec948964 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 747.575259] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 747.575259] env[69367]: WARNING nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance bdc0938b-60ef-463a-b3fd-1754f38a3b79 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 747.575259] env[69367]: WARNING nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 788b843c-1496-4562-a761-44f3e1ce6da2 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
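The resource-tracker entries above show three outcomes during the periodic allocation audit: instances actively managed on this host keep their placement allocation, instances whose allocation references this host but which are not managed here produce the "Skipping heal of allocation because we do not know what to do" warning, and (in later entries) instances merely scheduled here are skipped until they start. The sketch below only illustrates that three-way classification and is not Nova's resource_tracker code; every name in it (audit_allocations, tracked_uuids, scheduled_uuids, allocations) is hypothetical.

    def audit_allocations(tracked_uuids, scheduled_uuids, allocations):
        """Classify placement allocations against locally tracked instances.

        allocations maps instance UUID -> {'resources': {'DISK_GB': ..., ...}},
        mirroring the dicts printed in the log lines above.
        """
        verdicts = {}
        for uuid in allocations:
            if uuid in tracked_uuids:
                verdicts[uuid] = 'managed'    # actively managed: allocation kept
            elif uuid in scheduled_uuids:
                verdicts[uuid] = 'scheduled'  # scheduled but not started: skip heal
            else:
                verdicts[uuid] = 'orphaned'   # allocation references this host only: warn
        return verdicts

In the log, 'managed' corresponds to the DEBUG lines, 'orphaned' to the two WARNING lines, and 'scheduled' to the "has been scheduled to this compute host" lines that follow.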
[ 747.575386] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 011ab7de-98a7-41fc-9e05-e71965c73c09 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 747.575386] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance f8c07fa1-d27c-4d0f-847b-481477cd04bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 747.575386] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 837b4093-308b-440b-940d-fc0227a5c590 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 747.753653] env[69367]: INFO nova.compute.manager [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Took 33.08 seconds to build instance. [ 747.778834] env[69367]: DEBUG oslo_concurrency.lockutils [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 747.972996] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233946, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.5335} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.973455] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 011ab7de-98a7-41fc-9e05-e71965c73c09/011ab7de-98a7-41fc-9e05-e71965c73c09.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 747.975617] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 747.975617] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fada7a0a-3ee4-4710-8b34-c862c2da0094 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.982049] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Waiting for the task: (returnval){ [ 747.982049] env[69367]: value = "task-4233948" [ 747.982049] env[69367]: _type = "Task" [ 747.982049] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.992132] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233948, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.078010] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 10419c72-9876-45d3-a941-46464b47fddc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 748.259266] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fa32840b-b5e6-49ce-89e8-6324af525819 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "837b4093-308b-440b-940d-fc0227a5c590" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.291s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.487786] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] Acquiring lock "ac440ec4-8b1a-465a-a84d-66e8c823836b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.488085] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] Lock "ac440ec4-8b1a-465a-a84d-66e8c823836b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.501251] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233948, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074263} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.501539] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 748.502412] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb17b56-b976-4b6d-a77d-0133e65e7bca {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.527763] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Reconfiguring VM instance instance-0000002a to attach disk [datastore2] 011ab7de-98a7-41fc-9e05-e71965c73c09/011ab7de-98a7-41fc-9e05-e71965c73c09.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 748.528127] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bad7a218-e745-41b0-8478-62fbd16c8307 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.550645] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Waiting for the task: (returnval){ [ 748.550645] env[69367]: value = "task-4233949" [ 748.550645] env[69367]: _type = "Task" [ 748.550645] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.562253] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233949, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.583040] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 32ad9bbe-f92c-488d-a98a-d28bbfe8293f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 748.628150] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 748.628455] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-210734cd-c6a3-4124-a21f-878c601ac0f0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.636054] env[69367]: DEBUG oslo_vmware.api [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Waiting for the task: (returnval){ [ 748.636054] env[69367]: value = "task-4233950" [ 748.636054] env[69367]: _type = "Task" [ 748.636054] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.649019] env[69367]: DEBUG oslo_vmware.api [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4233950, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.763121] env[69367]: DEBUG nova.compute.manager [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 749.066384] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233949, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.090754] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance e4db7bcc-26dd-4f0d-80da-655a58c80783 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 749.151627] env[69367]: DEBUG oslo_vmware.api [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4233950, 'name': PowerOffVM_Task, 'duration_secs': 0.214692} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.152274] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 749.154510] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7fc565-81d7-4343-ab53-8ee466de6ffa {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.183440] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2135bf25-7cce-49be-aedd-eccb7ef66c58 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.229320] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 749.229630] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-669eb62e-7551-426c-b43c-986db068958a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.237703] env[69367]: DEBUG oslo_vmware.api [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Waiting for the task: (returnval){ [ 749.237703] env[69367]: value = "task-4233951" [ 749.237703] env[69367]: _type = "Task" [ 749.237703] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.240466] env[69367]: DEBUG nova.compute.manager [req-2eae7805-bd82-4a80-8372-10b4c53cfdb7 req-456257b1-1a0b-4243-8866-93e074c298e8 service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Received event network-changed-00ad3cfd-f282-442d-b152-85e841dd8a16 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 749.240603] env[69367]: DEBUG nova.compute.manager [req-2eae7805-bd82-4a80-8372-10b4c53cfdb7 req-456257b1-1a0b-4243-8866-93e074c298e8 service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Refreshing instance network info cache due to event network-changed-00ad3cfd-f282-442d-b152-85e841dd8a16. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 749.240843] env[69367]: DEBUG oslo_concurrency.lockutils [req-2eae7805-bd82-4a80-8372-10b4c53cfdb7 req-456257b1-1a0b-4243-8866-93e074c298e8 service nova] Acquiring lock "refresh_cache-837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.240986] env[69367]: DEBUG oslo_concurrency.lockutils [req-2eae7805-bd82-4a80-8372-10b4c53cfdb7 req-456257b1-1a0b-4243-8866-93e074c298e8 service nova] Acquired lock "refresh_cache-837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.241158] env[69367]: DEBUG nova.network.neutron [req-2eae7805-bd82-4a80-8372-10b4c53cfdb7 req-456257b1-1a0b-4243-8866-93e074c298e8 service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Refreshing network info cache for port 00ad3cfd-f282-442d-b152-85e841dd8a16 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 749.255240] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] VM already powered off {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 749.255488] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 749.255768] env[69367]: DEBUG oslo_concurrency.lockutils [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.255930] env[69367]: DEBUG oslo_concurrency.lockutils [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.256213] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 749.256765] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c5f843fb-40c6-4d42-9cf5-182f07ec3c79 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.273496] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Created 
directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 749.273812] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 749.278904] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d008348b-d699-4376-8bc8-6b68eb1ddee9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.286225] env[69367]: DEBUG oslo_vmware.api [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Waiting for the task: (returnval){ [ 749.286225] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]5246c146-7221-a15b-1cf3-d190bb41539f" [ 749.286225] env[69367]: _type = "Task" [ 749.286225] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.295325] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.303675] env[69367]: DEBUG oslo_vmware.api [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5246c146-7221-a15b-1cf3-d190bb41539f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.564781] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233949, 'name': ReconfigVM_Task, 'duration_secs': 0.568458} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.564781] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Reconfigured VM instance instance-0000002a to attach disk [datastore2] 011ab7de-98a7-41fc-9e05-e71965c73c09/011ab7de-98a7-41fc-9e05-e71965c73c09.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 749.565453] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7be8fc77-16cb-4ae6-be0e-bbf70dfc8ef4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.573116] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Waiting for the task: (returnval){ [ 749.573116] env[69367]: value = "task-4233952" [ 749.573116] env[69367]: _type = "Task" [ 749.573116] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.582963] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233952, 'name': Rename_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.595561] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 40e49f7b-e5f7-4673-a764-d8cec8a3cf18 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 749.797944] env[69367]: DEBUG oslo_vmware.api [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5246c146-7221-a15b-1cf3-d190bb41539f, 'name': SearchDatastore_Task, 'duration_secs': 0.017693} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.803616] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34920b98-731b-4fb5-8203-afe0335c9db3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.809270] env[69367]: DEBUG oslo_vmware.api [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Waiting for the task: (returnval){ [ 749.809270] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52b88d0e-77f6-d3c1-8db5-13a1a04eb84d" [ 749.809270] env[69367]: _type = "Task" [ 749.809270] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.817946] env[69367]: DEBUG oslo_vmware.api [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52b88d0e-77f6-d3c1-8db5-13a1a04eb84d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.041814] env[69367]: DEBUG nova.network.neutron [req-2eae7805-bd82-4a80-8372-10b4c53cfdb7 req-456257b1-1a0b-4243-8866-93e074c298e8 service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Updated VIF entry in instance network info cache for port 00ad3cfd-f282-442d-b152-85e841dd8a16. {{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 750.041814] env[69367]: DEBUG nova.network.neutron [req-2eae7805-bd82-4a80-8372-10b4c53cfdb7 req-456257b1-1a0b-4243-8866-93e074c298e8 service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Updating instance_info_cache with network_info: [{"id": "00ad3cfd-f282-442d-b152-85e841dd8a16", "address": "fa:16:3e:17:02:f1", "network": {"id": "a8e5c1a6-2526-4042-8a8d-fdb54dbe7bf9", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1429177141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f5bc3d470905412ea72a8eedb98e9e47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00ad3cfd-f2", "ovs_interfaceid": "00ad3cfd-f282-442d-b152-85e841dd8a16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.085877] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233952, 'name': Rename_Task, 'duration_secs': 0.178699} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.086367] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 750.086782] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-44e7fc6a-bf58-4982-a878-3587683d8070 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.097019] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Waiting for the task: (returnval){ [ 750.097019] env[69367]: value = "task-4233953" [ 750.097019] env[69367]: _type = "Task" [ 750.097019] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.101702] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance d6c2606d-0c6c-4add-b6f5-8229c21b56be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 750.112689] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233953, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.318100] env[69367]: DEBUG oslo_vmware.api [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52b88d0e-77f6-d3c1-8db5-13a1a04eb84d, 'name': SearchDatastore_Task, 'duration_secs': 0.018074} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.318418] env[69367]: DEBUG oslo_concurrency.lockutils [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.318635] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] f8c07fa1-d27c-4d0f-847b-481477cd04bf/2b099420-9152-4d93-9609-4c9317824c11-rescue.vmdk. 
{{(pid=69367) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 750.318966] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cbc36296-760e-477f-8e43-556d188a8064 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.327520] env[69367]: DEBUG oslo_vmware.api [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Waiting for the task: (returnval){ [ 750.327520] env[69367]: value = "task-4233954" [ 750.327520] env[69367]: _type = "Task" [ 750.327520] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.337284] env[69367]: DEBUG oslo_vmware.api [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4233954, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.544842] env[69367]: DEBUG oslo_concurrency.lockutils [req-2eae7805-bd82-4a80-8372-10b4c53cfdb7 req-456257b1-1a0b-4243-8866-93e074c298e8 service nova] Releasing lock "refresh_cache-837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.608036] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 7f937d89-684b-44f5-9f30-783aeafe99d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 750.609475] env[69367]: DEBUG oslo_vmware.api [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233953, 'name': PowerOnVM_Task, 'duration_secs': 0.453081} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.609727] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 750.611130] env[69367]: DEBUG nova.compute.manager [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 750.611130] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd22d132-ba25-411d-a394-f70e9e73540b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.842054] env[69367]: DEBUG oslo_vmware.api [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4233954, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.115974] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance ab9d8e3e-65c5-4ac9-920f-3042b8cf2054 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 751.129778] env[69367]: DEBUG oslo_concurrency.lockutils [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 751.339386] env[69367]: DEBUG oslo_vmware.api [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4233954, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.646628} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.339685] env[69367]: INFO nova.virt.vmwareapi.ds_util [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] f8c07fa1-d27c-4d0f-847b-481477cd04bf/2b099420-9152-4d93-9609-4c9317824c11-rescue.vmdk. 
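The CopyVirtualDisk_Task entries around this point follow the usual pattern visible throughout the log: submit the task, then poll it (progress is 0% ... 77% ... completed successfully) from wait_for_task/_poll_task. The snippet below is a minimal, self-contained sketch of such a polling loop, assuming a hypothetical get_task_state() callable; it is not oslo.vmware's implementation.

    import time

    def wait_for_task(get_task_state, interval=0.5, timeout=300.0):
        """Poll a long-running task until it succeeds, fails, or times out.

        get_task_state() is assumed to return (state, progress), where state
        is one of 'queued', 'running', 'success', 'error'.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress = get_task_state()
            print(f"progress is {progress}%")   # analogous to the _poll_task lines
            if state == 'success':
                return                          # "completed successfully"
            if state == 'error':
                raise RuntimeError("task failed")
            time.sleep(interval)
        raise TimeoutError(f"task did not complete within {timeout:.0f}s")

The 'duration_secs' values recorded in the log (e.g. 0.646628 for this copy) are simply how long such a loop ran before the task reached a terminal state.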
[ 751.340700] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d031e4b2-7fad-436b-8ebe-9bb2e0d24535 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.366193] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] f8c07fa1-d27c-4d0f-847b-481477cd04bf/2b099420-9152-4d93-9609-4c9317824c11-rescue.vmdk or device None with type thin {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 751.366496] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42419c1a-a925-43b8-88f5-7b09ab53e880 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.385361] env[69367]: DEBUG oslo_vmware.api [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Waiting for the task: (returnval){ [ 751.385361] env[69367]: value = "task-4233955" [ 751.385361] env[69367]: _type = "Task" [ 751.385361] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.394568] env[69367]: DEBUG oslo_vmware.api [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4233955, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.493535] env[69367]: DEBUG oslo_concurrency.lockutils [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Acquiring lock "011ab7de-98a7-41fc-9e05-e71965c73c09" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 751.494217] env[69367]: DEBUG oslo_concurrency.lockutils [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Lock "011ab7de-98a7-41fc-9e05-e71965c73c09" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 751.494435] env[69367]: DEBUG oslo_concurrency.lockutils [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Acquiring lock "011ab7de-98a7-41fc-9e05-e71965c73c09-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 751.494645] env[69367]: DEBUG oslo_concurrency.lockutils [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Lock "011ab7de-98a7-41fc-9e05-e71965c73c09-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 751.494820] env[69367]: DEBUG oslo_concurrency.lockutils [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Lock "011ab7de-98a7-41fc-9e05-e71965c73c09-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 751.497156] env[69367]: INFO nova.compute.manager [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Terminating instance [ 751.619324] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 557dc011-44a1-4240-9596-d055d57e176f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 751.896208] env[69367]: DEBUG oslo_vmware.api [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4233955, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.000680] env[69367]: DEBUG oslo_concurrency.lockutils [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Acquiring lock "refresh_cache-011ab7de-98a7-41fc-9e05-e71965c73c09" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.000880] env[69367]: DEBUG oslo_concurrency.lockutils [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Acquired lock "refresh_cache-011ab7de-98a7-41fc-9e05-e71965c73c09" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 752.001082] env[69367]: DEBUG nova.network.neutron [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 752.122646] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 63b3fceb-2a10-4626-a09d-5943535ad98c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 752.400020] env[69367]: DEBUG oslo_vmware.api [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4233955, 'name': ReconfigVM_Task, 'duration_secs': 0.787132} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.400020] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Reconfigured VM instance instance-0000002b to attach disk [datastore2] f8c07fa1-d27c-4d0f-847b-481477cd04bf/2b099420-9152-4d93-9609-4c9317824c11-rescue.vmdk or device None with type thin {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 752.400020] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef4f2ea-c04f-4242-810d-2ac028dd3c57 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.432534] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5a6cab8-6a6c-48be-a5e6-9f28516e0447 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.450373] env[69367]: DEBUG oslo_vmware.api [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Waiting for the task: (returnval){ [ 752.450373] env[69367]: value = "task-4233956" [ 752.450373] env[69367]: _type = "Task" [ 752.450373] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.459614] env[69367]: DEBUG oslo_vmware.api [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4233956, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.523947] env[69367]: DEBUG nova.network.neutron [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 752.581354] env[69367]: DEBUG nova.network.neutron [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.631443] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 022ca95b-30cc-41f1-be48-51fdfe1f0b14 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 752.961661] env[69367]: DEBUG oslo_vmware.api [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4233956, 'name': ReconfigVM_Task, 'duration_secs': 0.177245} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.962028] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 752.962309] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f24b1a50-0d75-40fc-8074-330596d26c56 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.969812] env[69367]: DEBUG oslo_vmware.api [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Waiting for the task: (returnval){ [ 752.969812] env[69367]: value = "task-4233957" [ 752.969812] env[69367]: _type = "Task" [ 752.969812] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.979827] env[69367]: DEBUG oslo_vmware.api [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4233957, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.085676] env[69367]: DEBUG oslo_concurrency.lockutils [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Releasing lock "refresh_cache-011ab7de-98a7-41fc-9e05-e71965c73c09" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.085676] env[69367]: DEBUG nova.compute.manager [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Start destroying the instance on the hypervisor. {{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 753.085676] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 753.087378] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3353ded-4ea0-4cfe-adc2-9bc0c3b9c6e5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.096703] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 753.098513] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-735825c9-2d66-4910-b862-8bc3447647b0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.106486] env[69367]: DEBUG oslo_vmware.api [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Waiting for the task: (returnval){ [ 753.106486] env[69367]: value = "task-4233958" [ 753.106486] env[69367]: _type = "Task" [ 753.106486] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.122526] env[69367]: DEBUG oslo_vmware.api [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233958, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.134504] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance f3386485-a173-4f5d-8f29-4972df3ae468 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 753.488743] env[69367]: DEBUG oslo_vmware.api [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4233957, 'name': PowerOnVM_Task, 'duration_secs': 0.438091} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.488743] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 753.492980] env[69367]: DEBUG nova.compute.manager [None req-43e02186-5124-43e9-925d-8618549710d6 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 753.493732] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e05bad-b492-408a-ad79-b75acb224b4a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.623239] env[69367]: DEBUG oslo_vmware.api [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233958, 'name': PowerOffVM_Task, 'duration_secs': 0.122565} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.623239] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 753.623239] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 753.623239] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7973e2ae-94e2-4915-957f-8484e6968e7d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.638130] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 236173c7-9464-44b5-83a5-6ff60eedcc6a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 753.650603] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 753.650836] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 753.651032] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Deleting the datastore file [datastore2] 011ab7de-98a7-41fc-9e05-e71965c73c09 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 753.651318] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ea196015-d7b0-4712-b0e6-ad57e4adf071 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.660826] env[69367]: DEBUG oslo_vmware.api [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Waiting for the task: (returnval){ [ 753.660826] env[69367]: value = "task-4233960" [ 753.660826] env[69367]: _type = "Task" [ 753.660826] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.673756] env[69367]: DEBUG oslo_vmware.api [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233960, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.007646] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] Acquiring lock "b6d326ff-45aa-44b6-b99c-95edca647e2c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.007958] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] Lock "b6d326ff-45aa-44b6-b99c-95edca647e2c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.143394] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 8ee84a56-cc49-4056-b561-aa1f2b10a06c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 754.173893] env[69367]: DEBUG oslo_vmware.api [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Task: {'id': task-4233960, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.111191} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.174180] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 754.174374] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 754.174555] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 754.174726] env[69367]: INFO nova.compute.manager [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Took 1.09 seconds to destroy the instance on the hypervisor. 
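The records above show the instance teardown driving vCenter tasks (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) and polling them via oslo_vmware.api.wait_for_task. As a rough illustration only, and not the exact Nova call path, the following sketch shows how a datastore-file delete task is typically issued and polled with oslo.vmware; the host, credentials, and the datacenter reference are placeholders.

    from oslo_vmware import api as vmware_api

    # Placeholder connection details; a real session needs a reachable vCenter.
    session = vmware_api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                          api_retry_count=10, task_poll_interval=0.5)

    file_manager = session.vim.service_content.fileManager
    datacenter = ...  # Datacenter managed-object reference, resolved elsewhere

    # Kick off the delete and block until the task reaches a terminal state,
    # mirroring the DeleteDatastoreFile_Task / wait_for_task pattern in the log.
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task', file_manager,
                              name='[datastore2] 011ab7de-98a7-41fc-9e05-e71965c73c09',
                              datacenter=datacenter)
    session.wait_for_task(task)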
[ 754.174976] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 754.175180] env[69367]: DEBUG nova.compute.manager [-] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 754.175283] env[69367]: DEBUG nova.network.neutron [-] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 754.198341] env[69367]: DEBUG nova.network.neutron [-] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 754.647390] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance d7009e78-b9f4-47e8-ba29-dfc710bef8ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 754.702597] env[69367]: DEBUG nova.network.neutron [-] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.155048] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance f0522b69-b593-404e-8f24-b6c5c6c8b2e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 755.164486] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Acquiring lock "eab70948-bb67-4f56-9f35-65e164fd5990" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.164974] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Lock "eab70948-bb67-4f56-9f35-65e164fd5990" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.205796] env[69367]: INFO nova.compute.manager [-] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Took 1.03 seconds to deallocate network for instance. 
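The resource-tracker records that follow report the node's final resource view and push an inventories payload to placement; the repeated 400 errors further down occur because the DISK_GB entry is sent with max_unit 0, while the placement schema quoted in those errors requires max_unit to be an integer of at least 1. A minimal sketch, assuming the python-jsonschema package, of why that payload is rejected (values copied from the records below):

    import jsonschema

    # max_unit constraint as quoted verbatim in the placement 400 response below.
    max_unit_schema = {'type': 'integer', 'maximum': 2147483647, 'minimum': 1}

    # DISK_GB inventory entry as reported by the compute node in the failing updates.
    disk_gb = {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0,
               'step_size': 1, 'allocation_ratio': 1.0}

    try:
        jsonschema.validate(disk_gb['max_unit'], max_unit_schema)
    except jsonschema.exceptions.ValidationError as exc:
        # Prints "0 is less than the minimum of 1", matching the error detail
        # in the ResourceProviderSyncFailed tracebacks later in this log.
        print(exc.message)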
[ 755.657821] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 3f6a67a9-08db-4a15-ae07-bef02b9a6d48 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 755.713618] env[69367]: DEBUG oslo_concurrency.lockutils [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 756.160653] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 17ffa2b1-4a0a-4e14-a7b0-104791adf072 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 756.161024] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Instance with task_state "unshelving" is not being actively managed by this compute host but has allocations referencing this compute node (19ddf8be-7305-4f70-8366-52a9957232e6): {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocations during the task state transition. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1810}} [ 756.664686] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance dd598b7a-057f-48ea-a31e-96e7ccadeb3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 756.665339] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 756.665339] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3008MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 756.684117] env[69367]: DEBUG nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 756.699292] env[69367]: DEBUG nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 756.699501] env[69367]: DEBUG nova.compute.provider_tree [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 756.711120] env[69367]: DEBUG nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 756.731534] env[69367]: DEBUG nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 756.860877] env[69367]: DEBUG oslo_concurrency.lockutils [None req-eccf0c79-5df1-4484-9bf6-63438aa251d1 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquiring lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69367) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 756.861137] env[69367]: DEBUG oslo_concurrency.lockutils [None req-eccf0c79-5df1-4484-9bf6-63438aa251d1 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 757.209908] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aba1749-90dc-4f08-b6c8-b4f2bb1cd0db {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.218534] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-200f0474-f5b6-46e4-95b7-25da8a22e040 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.250863] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4528d5f4-9a2a-40a5-98da-8547084cfea2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.259937] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a351e5df-a930-44b9-b792-938e6bccc53e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.274026] env[69367]: DEBUG nova.compute.provider_tree [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 757.365748] env[69367]: DEBUG nova.compute.utils [None req-eccf0c79-5df1-4484-9bf6-63438aa251d1 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 757.797548] env[69367]: ERROR nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] [req-08b8fb3c-4130-465c-9791-d75a4e481127] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-08b8fb3c-4130-465c-9791-d75a4e481127"}]} [ 757.798111] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 14.831s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.798966] env[69367]: ERROR nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Error updating resources for node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28.: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 757.798966] env[69367]: ERROR nova.compute.manager Traceback (most recent call last): [ 757.798966] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 757.798966] env[69367]: ERROR nova.compute.manager yield [ 757.798966] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 757.798966] env[69367]: ERROR nova.compute.manager self.set_inventory_for_provider( [ 757.798966] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 757.798966] env[69367]: ERROR nova.compute.manager raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 757.798966] env[69367]: ERROR nova.compute.manager nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-08b8fb3c-4130-465c-9791-d75a4e481127"}]} [ 757.798966] env[69367]: ERROR nova.compute.manager [ 757.798966] env[69367]: ERROR nova.compute.manager During handling of the above exception, another exception occurred: [ 757.798966] env[69367]: ERROR nova.compute.manager [ 757.800178] env[69367]: ERROR nova.compute.manager Traceback (most recent call last): [ 757.800178] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 11219, in _update_available_resource_for_node [ 757.800178] env[69367]: ERROR nova.compute.manager self.rt.update_available_resource(context, nodename, [ 757.800178] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 965, in update_available_resource [ 
757.800178] env[69367]: ERROR nova.compute.manager self._update_available_resource(context, resources, startup=startup) [ 757.800178] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 757.800178] env[69367]: ERROR nova.compute.manager return f(*args, **kwargs) [ 757.800178] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1096, in _update_available_resource [ 757.800178] env[69367]: ERROR nova.compute.manager self._update(context, cn, startup=startup) [ 757.800178] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 757.800178] env[69367]: ERROR nova.compute.manager self._update_to_placement(context, compute_node, startup) [ 757.800178] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 757.800178] env[69367]: ERROR nova.compute.manager return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 757.800178] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 757.800178] env[69367]: ERROR nova.compute.manager return attempt.get(self._wrap_exception) [ 757.800178] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 757.800178] env[69367]: ERROR nova.compute.manager six.reraise(self.value[0], self.value[1], self.value[2]) [ 757.800178] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 757.800649] env[69367]: ERROR nova.compute.manager raise value [ 757.800649] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 757.800649] env[69367]: ERROR nova.compute.manager attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 757.800649] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 757.800649] env[69367]: ERROR nova.compute.manager self.reportclient.update_from_provider_tree( [ 757.800649] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 757.800649] env[69367]: ERROR nova.compute.manager with catch_all(pd.uuid): [ 757.800649] env[69367]: ERROR nova.compute.manager File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 757.800649] env[69367]: ERROR nova.compute.manager self.gen.throw(typ, value, traceback) [ 757.800649] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 757.800649] env[69367]: ERROR nova.compute.manager raise exception.ResourceProviderSyncFailed() [ 757.800649] env[69367]: ERROR nova.compute.manager nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 757.800649] env[69367]: ERROR nova.compute.manager [ 757.800649] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.691s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 757.801025] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.802725] env[69367]: DEBUG oslo_concurrency.lockutils [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.210s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 757.805816] env[69367]: INFO nova.compute.claims [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 10419c72-9876-45d3-a941-46464b47fddc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 757.832467] env[69367]: INFO nova.scheduler.client.report [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Deleted allocations for instance bdc0938b-60ef-463a-b3fd-1754f38a3b79 [ 757.868750] env[69367]: DEBUG oslo_concurrency.lockutils [None req-eccf0c79-5df1-4484-9bf6-63438aa251d1 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.343479] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5256b69c-c957-4645-bf1f-f7ae1efeab28 tempest-ServerPasswordTestJSON-287328885 tempest-ServerPasswordTestJSON-287328885-project-member] Lock "bdc0938b-60ef-463a-b3fd-1754f38a3b79" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.048s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.833741] env[69367]: DEBUG nova.scheduler.client.report [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 758.848169] env[69367]: DEBUG nova.scheduler.client.report [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 
'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 758.848437] env[69367]: DEBUG nova.compute.provider_tree [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 758.859899] env[69367]: DEBUG nova.scheduler.client.report [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 758.879612] env[69367]: DEBUG nova.scheduler.client.report [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 758.923091] env[69367]: DEBUG oslo_concurrency.lockutils [None req-eccf0c79-5df1-4484-9bf6-63438aa251d1 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquiring lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.923364] env[69367]: DEBUG oslo_concurrency.lockutils [None req-eccf0c79-5df1-4484-9bf6-63438aa251d1 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.923604] env[69367]: INFO nova.compute.manager [None req-eccf0c79-5df1-4484-9bf6-63438aa251d1 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Attaching volume 0572f679-bc16-40f7-b698-92ec30d0b913 to /dev/sdb [ 758.963681] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad3260cc-901b-4f87-ae0d-d6adb53914f9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.973782] env[69367]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b24a88-5b10-430f-8d16-6a46fe26bb37 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.989047] env[69367]: DEBUG nova.virt.block_device [None req-eccf0c79-5df1-4484-9bf6-63438aa251d1 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Updating existing volume attachment record: 4aec1a09-2e21-4372-a6b8-2dc1b6afc6c0 {{(pid=69367) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 759.378842] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2ffce83-d0d3-412c-b013-25fd38539ef8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.390248] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47c94042-d381-496a-a751-cc59cac7d416 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.421690] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647a5144-cd33-4dde-bd93-b35ccd8295b5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.430667] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a2c328-bb09-42d5-9718-e45cdc69afc5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.446286] env[69367]: DEBUG nova.compute.provider_tree [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 760.004404] env[69367]: ERROR nova.scheduler.client.report [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [req-099d8944-0281-4891-8fee-2c128fedb03e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-099d8944-0281-4891-8fee-2c128fedb03e"}]} [ 760.004782] env[69367]: DEBUG oslo_concurrency.lockutils [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.202s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 760.005418] env[69367]: ERROR nova.compute.manager [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 10419c72-9876-45d3-a941-46464b47fddc] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 760.005418] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] Traceback (most recent call last): [ 760.005418] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 760.005418] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] yield [ 760.005418] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 760.005418] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] self.set_inventory_for_provider( [ 760.005418] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 760.005418] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 760.005786] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-099d8944-0281-4891-8fee-2c128fedb03e"}]} [ 760.005786] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] [ 760.005786] env[69367]: ERROR nova.compute.manager [instance: 
10419c72-9876-45d3-a941-46464b47fddc] During handling of the above exception, another exception occurred: [ 760.005786] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] [ 760.005786] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] Traceback (most recent call last): [ 760.005786] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 760.005786] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] with self.rt.instance_claim(context, instance, node, allocs, [ 760.005786] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 760.005786] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] return f(*args, **kwargs) [ 760.006212] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 760.006212] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] self._update(elevated, cn) [ 760.006212] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 760.006212] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] self._update_to_placement(context, compute_node, startup) [ 760.006212] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 760.006212] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 760.006212] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 760.006212] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] return attempt.get(self._wrap_exception) [ 760.006212] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 760.006212] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] six.reraise(self.value[0], self.value[1], self.value[2]) [ 760.006212] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 760.006212] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] raise value [ 760.006212] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 760.006876] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 760.006876] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in 
_update_to_placement [ 760.006876] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] self.reportclient.update_from_provider_tree( [ 760.006876] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 760.006876] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] with catch_all(pd.uuid): [ 760.006876] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 760.006876] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] self.gen.throw(typ, value, traceback) [ 760.006876] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 760.006876] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] raise exception.ResourceProviderSyncFailed() [ 760.006876] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 760.006876] env[69367]: ERROR nova.compute.manager [instance: 10419c72-9876-45d3-a941-46464b47fddc] [ 760.007364] env[69367]: DEBUG nova.compute.utils [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 10419c72-9876-45d3-a941-46464b47fddc] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 760.009052] env[69367]: DEBUG oslo_concurrency.lockutils [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.368s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 760.010593] env[69367]: INFO nova.compute.claims [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 760.016442] env[69367]: DEBUG nova.compute.manager [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 10419c72-9876-45d3-a941-46464b47fddc] Build of instance 10419c72-9876-45d3-a941-46464b47fddc was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 760.016442] env[69367]: DEBUG nova.compute.manager [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 10419c72-9876-45d3-a941-46464b47fddc] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 760.016442] env[69367]: DEBUG oslo_concurrency.lockutils [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "refresh_cache-10419c72-9876-45d3-a941-46464b47fddc" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.016683] env[69367]: DEBUG oslo_concurrency.lockutils [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquired lock "refresh_cache-10419c72-9876-45d3-a941-46464b47fddc" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 760.016760] env[69367]: DEBUG nova.network.neutron [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 10419c72-9876-45d3-a941-46464b47fddc] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 760.549121] env[69367]: DEBUG nova.network.neutron [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 10419c72-9876-45d3-a941-46464b47fddc] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 760.624746] env[69367]: DEBUG nova.network.neutron [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 10419c72-9876-45d3-a941-46464b47fddc] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.052269] env[69367]: DEBUG nova.scheduler.client.report [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 761.069233] env[69367]: DEBUG nova.scheduler.client.report [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 761.069387] env[69367]: DEBUG nova.compute.provider_tree [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 761.082611] env[69367]: DEBUG nova.scheduler.client.report [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 761.103024] env[69367]: DEBUG nova.scheduler.client.report [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 761.127871] env[69367]: DEBUG oslo_concurrency.lockutils [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Releasing lock 
"refresh_cache-10419c72-9876-45d3-a941-46464b47fddc" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 761.128118] env[69367]: DEBUG nova.compute.manager [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 761.128306] env[69367]: DEBUG nova.compute.manager [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 10419c72-9876-45d3-a941-46464b47fddc] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 761.128509] env[69367]: DEBUG nova.network.neutron [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 10419c72-9876-45d3-a941-46464b47fddc] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 761.147065] env[69367]: DEBUG nova.network.neutron [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 10419c72-9876-45d3-a941-46464b47fddc] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 761.595607] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce1e268-8e5d-4fff-8efa-61ca0b131fbe {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.603940] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dfbb6c5-a754-4339-ab16-e10e68f3147f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.635530] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c34279c9-6e9d-49be-9ed1-81ed2586628e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.643527] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81cd1eda-5257-4b20-ae14-635c8943c713 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.657040] env[69367]: DEBUG nova.network.neutron [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 10419c72-9876-45d3-a941-46464b47fddc] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.658424] env[69367]: DEBUG nova.compute.provider_tree [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 
65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 762.161785] env[69367]: INFO nova.compute.manager [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 10419c72-9876-45d3-a941-46464b47fddc] Took 1.03 seconds to deallocate network for instance. [ 762.185679] env[69367]: ERROR nova.scheduler.client.report [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] [req-fa585f89-3e55-411e-91f0-6479dc81f2d2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-fa585f89-3e55-411e-91f0-6479dc81f2d2"}]} [ 762.186127] env[69367]: DEBUG oslo_concurrency.lockutils [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.177s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 762.186877] env[69367]: ERROR nova.compute.manager [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 762.186877] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] Traceback (most recent call last): [ 762.186877] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 762.186877] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] yield [ 762.186877] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 762.186877] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] self.set_inventory_for_provider( [ 762.186877] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 762.186877] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 762.187164] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-fa585f89-3e55-411e-91f0-6479dc81f2d2"}]} [ 762.187164] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] [ 762.187164] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] During handling of the above exception, another exception occurred: [ 762.187164] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] [ 762.187164] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] Traceback (most recent call last): [ 762.187164] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 762.187164] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] with self.rt.instance_claim(context, instance, node, allocs, [ 762.187164] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 762.187164] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] return f(*args, **kwargs) [ 762.187435] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 762.187435] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] self._update(elevated, cn) [ 762.187435] env[69367]: ERROR nova.compute.manager [instance: 
32ad9bbe-f92c-488d-a98a-d28bbfe8293f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 762.187435] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] self._update_to_placement(context, compute_node, startup) [ 762.187435] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 762.187435] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 762.187435] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 762.187435] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] return attempt.get(self._wrap_exception) [ 762.187435] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 762.187435] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] six.reraise(self.value[0], self.value[1], self.value[2]) [ 762.187435] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 762.187435] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] raise value [ 762.187435] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 762.187775] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 762.187775] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 762.187775] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] self.reportclient.update_from_provider_tree( [ 762.187775] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 762.187775] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] with catch_all(pd.uuid): [ 762.187775] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 762.187775] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] self.gen.throw(typ, value, traceback) [ 762.187775] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 762.187775] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] raise exception.ResourceProviderSyncFailed() [ 762.187775] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
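The traceback shows two-stage error handling in the report client: set_inventory_for_provider raises the detailed ResourceProviderUpdateFailed carrying the 400 body, and the catch_all context manager around update_from_provider_tree converts it into the generic ResourceProviderSyncFailed that _build_and_run_instance sees, which triggers the re-schedule below. This is a simplified illustration of that pattern, not the actual Nova implementation; the class and function names mirror the traceback but the bodies are stand-ins.

    import contextlib

    class ResourceProviderUpdateFailed(Exception):
        """Detailed error carrying the placement URL and response body."""

    class ResourceProviderSyncFailed(Exception):
        """Generic error surfaced to the compute manager."""

    @contextlib.contextmanager
    def catch_all(rp_uuid):
        try:
            yield
        except ResourceProviderUpdateFailed:
            # The detailed 400 has already been logged; callers only need to
            # know that this provider could not be synchronized.
            raise ResourceProviderSyncFailed()

    def update_from_provider_tree(rp_uuid):
        with catch_all(rp_uuid):
            # Stand-in for set_inventory_for_provider() failing its PUT.
            raise ResourceProviderUpdateFailed("400 Bad Request")

    try:
        update_from_provider_tree("19ddf8be-7305-4f70-8366-52a9957232e6")
    except ResourceProviderSyncFailed:
        print("instance claim fails and the build is re-scheduled")

Raising the new exception inside the except block is what produces the "During handling of the above exception, another exception occurred" chaining seen in the log.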
[ 762.187775] env[69367]: ERROR nova.compute.manager [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] [ 762.188059] env[69367]: DEBUG nova.compute.utils [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 762.189302] env[69367]: DEBUG oslo_concurrency.lockutils [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.258s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.190897] env[69367]: INFO nova.compute.claims [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 762.193842] env[69367]: DEBUG nova.compute.manager [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] Build of instance 32ad9bbe-f92c-488d-a98a-d28bbfe8293f was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 762.194339] env[69367]: DEBUG nova.compute.manager [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 762.194597] env[69367]: DEBUG oslo_concurrency.lockutils [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] Acquiring lock "refresh_cache-32ad9bbe-f92c-488d-a98a-d28bbfe8293f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.194801] env[69367]: DEBUG oslo_concurrency.lockutils [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] Acquired lock "refresh_cache-32ad9bbe-f92c-488d-a98a-d28bbfe8293f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 762.194999] env[69367]: DEBUG nova.network.neutron [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 762.203834] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] Acquiring lock 
"484ce161-5686-4573-8eed-4ebb3505e843" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.203834] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] Lock "484ce161-5686-4573-8eed-4ebb3505e843" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.717216] env[69367]: DEBUG nova.network.neutron [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 762.806705] env[69367]: DEBUG nova.network.neutron [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.198075] env[69367]: INFO nova.scheduler.client.report [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Deleted allocations for instance 10419c72-9876-45d3-a941-46464b47fddc [ 763.225971] env[69367]: DEBUG nova.scheduler.client.report [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 763.242511] env[69367]: DEBUG nova.scheduler.client.report [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 763.242950] env[69367]: DEBUG nova.compute.provider_tree [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 763.255995] env[69367]: DEBUG nova.scheduler.client.report [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 763.285134] env[69367]: DEBUG nova.scheduler.client.report [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 763.310068] env[69367]: DEBUG oslo_concurrency.lockutils [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] Releasing lock "refresh_cache-32ad9bbe-f92c-488d-a98a-d28bbfe8293f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 763.310327] env[69367]: DEBUG nova.compute.manager [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 763.310513] env[69367]: DEBUG nova.compute.manager [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 763.310685] env[69367]: DEBUG nova.network.neutron [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 763.328579] env[69367]: DEBUG nova.network.neutron [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 763.556341] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-eccf0c79-5df1-4484-9bf6-63438aa251d1 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Volume attach. 
Driver type: vmdk {{(pid=69367) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 763.556589] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-eccf0c79-5df1-4484-9bf6-63438aa251d1 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837720', 'volume_id': '0572f679-bc16-40f7-b698-92ec30d0b913', 'name': 'volume-0572f679-bc16-40f7-b698-92ec30d0b913', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd2f8328d-fd05-4e63-9cbd-a6e3ec948964', 'attached_at': '', 'detached_at': '', 'volume_id': '0572f679-bc16-40f7-b698-92ec30d0b913', 'serial': '0572f679-bc16-40f7-b698-92ec30d0b913'} {{(pid=69367) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 763.557521] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82f22f5-dbf8-45ec-802b-59619a91a6c8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.576595] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd0620f-7941-4268-9c47-1e8e0ef1f560 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.602928] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-eccf0c79-5df1-4484-9bf6-63438aa251d1 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Reconfiguring VM instance instance-00000018 to attach disk [localhost-esx-install-datastore (1)] volume-0572f679-bc16-40f7-b698-92ec30d0b913/volume-0572f679-bc16-40f7-b698-92ec30d0b913.vmdk or device None with type thin {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 763.606398] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9b0b0ec-e68c-4094-99ec-0ef0d624ebb8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.625381] env[69367]: DEBUG oslo_vmware.api [None req-eccf0c79-5df1-4484-9bf6-63438aa251d1 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Waiting for the task: (returnval){ [ 763.625381] env[69367]: value = "task-4233965" [ 763.625381] env[69367]: _type = "Task" [ 763.625381] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.635052] env[69367]: DEBUG oslo_vmware.api [None req-eccf0c79-5df1-4484-9bf6-63438aa251d1 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': task-4233965, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.708561] env[69367]: DEBUG oslo_concurrency.lockutils [None req-89f1736f-ef76-4bc5-98e1-36367045736e tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "10419c72-9876-45d3-a941-46464b47fddc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.044s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.780390] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4bbe558-25a7-4853-8dec-6be7a3c8310c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.791032] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b0573c-7fdc-4eca-8e0c-0bd417362fdb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.822977] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-defc1314-adf1-443d-a183-b849adeecd0f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.831706] env[69367]: DEBUG nova.network.neutron [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.835524] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b8fc96-adaf-46cf-85f6-1ea27850af3a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.852237] env[69367]: DEBUG nova.compute.provider_tree [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 764.136925] env[69367]: DEBUG oslo_vmware.api [None req-eccf0c79-5df1-4484-9bf6-63438aa251d1 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': task-4233965, 'name': ReconfigVM_Task, 'duration_secs': 0.227013} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.137778] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-eccf0c79-5df1-4484-9bf6-63438aa251d1 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Reconfigured VM instance instance-00000018 to attach disk [localhost-esx-install-datastore (1)] volume-0572f679-bc16-40f7-b698-92ec30d0b913/volume-0572f679-bc16-40f7-b698-92ec30d0b913.vmdk or device None with type thin {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 764.142376] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-277dc31e-64ca-4538-bc76-61b71b47fdd2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.157771] env[69367]: DEBUG oslo_vmware.api [None req-eccf0c79-5df1-4484-9bf6-63438aa251d1 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Waiting for the task: (returnval){ [ 764.157771] env[69367]: value = "task-4233966" [ 764.157771] env[69367]: _type = "Task" [ 764.157771] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.167927] env[69367]: DEBUG oslo_vmware.api [None req-eccf0c79-5df1-4484-9bf6-63438aa251d1 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': task-4233966, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.214087] env[69367]: DEBUG nova.compute.manager [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: f3386485-a173-4f5d-8f29-4972df3ae468] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 764.336263] env[69367]: INFO nova.compute.manager [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] [instance: 32ad9bbe-f92c-488d-a98a-d28bbfe8293f] Took 1.02 seconds to deallocate network for instance. [ 764.375473] env[69367]: ERROR nova.scheduler.client.report [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] [req-f4e0cc4b-eea8-4ec9-b82a-1a0a9d70fd09] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-f4e0cc4b-eea8-4ec9-b82a-1a0a9d70fd09"}]} [ 764.375975] env[69367]: DEBUG oslo_concurrency.lockutils [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.187s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 764.376501] env[69367]: ERROR nova.compute.manager [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 764.376501] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] Traceback (most recent call last): [ 764.376501] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 764.376501] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] yield [ 764.376501] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 764.376501] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] self.set_inventory_for_provider( [ 764.376501] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 764.376501] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 764.376756] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-f4e0cc4b-eea8-4ec9-b82a-1a0a9d70fd09"}]} [ 764.376756] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] [ 764.376756] env[69367]: ERROR 
nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] During handling of the above exception, another exception occurred: [ 764.376756] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] [ 764.376756] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] Traceback (most recent call last): [ 764.376756] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 764.376756] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] with self.rt.instance_claim(context, instance, node, allocs, [ 764.376756] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 764.376756] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] return f(*args, **kwargs) [ 764.377262] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 764.377262] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] self._update(elevated, cn) [ 764.377262] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 764.377262] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] self._update_to_placement(context, compute_node, startup) [ 764.377262] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 764.377262] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 764.377262] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 764.377262] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] return attempt.get(self._wrap_exception) [ 764.377262] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 764.377262] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] six.reraise(self.value[0], self.value[1], self.value[2]) [ 764.377262] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 764.377262] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] raise value [ 764.377262] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 764.377625] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 764.377625] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] File 
"/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 764.377625] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] self.reportclient.update_from_provider_tree( [ 764.377625] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 764.377625] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] with catch_all(pd.uuid): [ 764.377625] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 764.377625] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] self.gen.throw(typ, value, traceback) [ 764.377625] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 764.377625] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] raise exception.ResourceProviderSyncFailed() [ 764.377625] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 764.377625] env[69367]: ERROR nova.compute.manager [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] [ 764.377909] env[69367]: DEBUG nova.compute.utils [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 764.378328] env[69367]: DEBUG oslo_concurrency.lockutils [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.130s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 764.378688] env[69367]: DEBUG oslo_concurrency.lockutils [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 764.380829] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.233s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 764.382688] env[69367]: INFO nova.compute.claims [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 764.386266] env[69367]: DEBUG nova.compute.manager [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] Build of instance e4db7bcc-26dd-4f0d-80da-655a58c80783 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 764.386696] env[69367]: DEBUG nova.compute.manager [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 764.386929] env[69367]: DEBUG oslo_concurrency.lockutils [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] Acquiring lock "refresh_cache-e4db7bcc-26dd-4f0d-80da-655a58c80783" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.387093] env[69367]: DEBUG oslo_concurrency.lockutils [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] Acquired lock "refresh_cache-e4db7bcc-26dd-4f0d-80da-655a58c80783" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 764.387378] env[69367]: DEBUG nova.network.neutron [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 764.402359] env[69367]: INFO nova.scheduler.client.report [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Deleted allocations for instance 788b843c-1496-4562-a761-44f3e1ce6da2 [ 764.668313] env[69367]: DEBUG oslo_vmware.api [None req-eccf0c79-5df1-4484-9bf6-63438aa251d1 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': task-4233966, 'name': ReconfigVM_Task, 'duration_secs': 0.160222} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.668636] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-eccf0c79-5df1-4484-9bf6-63438aa251d1 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837720', 'volume_id': '0572f679-bc16-40f7-b698-92ec30d0b913', 'name': 'volume-0572f679-bc16-40f7-b698-92ec30d0b913', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd2f8328d-fd05-4e63-9cbd-a6e3ec948964', 'attached_at': '', 'detached_at': '', 'volume_id': '0572f679-bc16-40f7-b698-92ec30d0b913', 'serial': '0572f679-bc16-40f7-b698-92ec30d0b913'} {{(pid=69367) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 764.734419] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 764.846458] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "bb59f765-0d86-4803-845c-8186e9341702" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 764.846770] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "bb59f765-0d86-4803-845c-8186e9341702" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 764.909387] env[69367]: DEBUG nova.network.neutron [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 764.911668] env[69367]: DEBUG oslo_concurrency.lockutils [None req-601e8c8f-7848-449b-9807-239b90966e66 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Lock "788b843c-1496-4562-a761-44f3e1ce6da2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.213s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.002631] env[69367]: DEBUG nova.network.neutron [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.370697] env[69367]: INFO nova.scheduler.client.report [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] Deleted allocations for instance 32ad9bbe-f92c-488d-a98a-d28bbfe8293f [ 765.412126] env[69367]: DEBUG nova.scheduler.client.report [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 765.428026] env[69367]: DEBUG nova.scheduler.client.report [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 765.428026] env[69367]: DEBUG nova.compute.provider_tree [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 765.440239] env[69367]: DEBUG nova.scheduler.client.report [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:831}} [ 765.459292] env[69367]: DEBUG nova.scheduler.client.report [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 765.505905] env[69367]: DEBUG oslo_concurrency.lockutils [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] Releasing lock "refresh_cache-e4db7bcc-26dd-4f0d-80da-655a58c80783" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 765.506134] env[69367]: DEBUG nova.compute.manager [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 765.506330] env[69367]: DEBUG nova.compute.manager [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 765.507044] env[69367]: DEBUG nova.network.neutron [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 765.522135] env[69367]: DEBUG nova.network.neutron [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 765.711740] env[69367]: DEBUG nova.objects.instance [None req-eccf0c79-5df1-4484-9bf6-63438aa251d1 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lazy-loading 'flavor' on Instance uuid d2f8328d-fd05-4e63-9cbd-a6e3ec948964 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 765.879044] env[69367]: DEBUG oslo_concurrency.lockutils [None req-cd36cff0-1cf6-4889-b011-15c7764840c6 tempest-InstanceActionsTestJSON-640821445 tempest-InstanceActionsTestJSON-640821445-project-member] Lock "32ad9bbe-f92c-488d-a98a-d28bbfe8293f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.030s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.883099] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62e4dd0b-35b4-4f46-b220-4825344fd588 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.893309] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1210fc0f-e4f7-4205-b37a-1a4362656e8e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.925998] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9c54f54-5bdd-4fe5-ad82-f75b608c4e90 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.935564] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e8dedc6-de16-4678-ae69-e438dcaff2ac {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.951292] env[69367]: DEBUG nova.compute.provider_tree [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 766.024821] env[69367]: DEBUG nova.network.neutron [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] [instance: e4db7bcc-26dd-4f0d-80da-655a58c80783] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.216996] env[69367]: DEBUG oslo_concurrency.lockutils [None req-eccf0c79-5df1-4484-9bf6-63438aa251d1 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.294s {{(pid=69367) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 766.384173] env[69367]: DEBUG nova.compute.manager [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 766.483577] env[69367]: ERROR nova.scheduler.client.report [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] [req-45c337a8-a7be-4823-ac90-bb79645e1c1e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-45c337a8-a7be-4823-ac90-bb79645e1c1e"}]} [ 766.483957] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.103s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 766.484575] env[69367]: ERROR nova.compute.manager [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
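Every claim on this node fails the same way because the inventory refreshed from placement (DISK_GB max_unit 1, visible in the _refresh_and_get_inventory entries above) is overwritten in the ProviderTree by a driver-reported max_unit of 0 before each PUT. One plausible origin, an assumption rather than something this log proves, is that the driver derives DISK_GB max_unit from the largest free datastore space by integer GiB division, so anything under 1 GiB of free space floors to 0. The sketch below shows that arithmetic and the obvious clamp; disk_gb_max_unit is a hypothetical helper, not a Nova function.

    GIB = 1024 ** 3

    def disk_gb_max_unit(largest_free_bytes: int) -> int:
        # Integer division floors anything below 1 GiB of free space to 0,
        # which placement's schema rejects; clamping to 1 keeps the PUT valid.
        return max(1, largest_free_bytes // GIB)

    print(disk_gb_max_unit(512 * 1024 ** 2))  # 0.5 GiB free -> clamped to 1
    print(disk_gb_max_unit(400 * GIB))        # healthy datastore -> 400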
[ 766.484575] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] Traceback (most recent call last): [ 766.484575] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 766.484575] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] yield [ 766.484575] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 766.484575] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] self.set_inventory_for_provider( [ 766.484575] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 766.484575] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 766.484947] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-45c337a8-a7be-4823-ac90-bb79645e1c1e"}]} [ 766.484947] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] [ 766.484947] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] During handling of the above exception, another exception occurred: [ 766.484947] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] [ 766.484947] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] Traceback (most recent call last): [ 766.484947] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 766.484947] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] with self.rt.instance_claim(context, instance, node, allocs, [ 766.484947] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 766.484947] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] return f(*args, **kwargs) [ 766.485413] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 766.485413] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] self._update(elevated, cn) [ 766.485413] env[69367]: ERROR nova.compute.manager [instance: 
40e49f7b-e5f7-4673-a764-d8cec8a3cf18] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 766.485413] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] self._update_to_placement(context, compute_node, startup) [ 766.485413] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 766.485413] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 766.485413] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 766.485413] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] return attempt.get(self._wrap_exception) [ 766.485413] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 766.485413] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] six.reraise(self.value[0], self.value[1], self.value[2]) [ 766.485413] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 766.485413] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] raise value [ 766.485413] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 766.485967] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 766.485967] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 766.485967] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] self.reportclient.update_from_provider_tree( [ 766.485967] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 766.485967] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] with catch_all(pd.uuid): [ 766.485967] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 766.485967] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] self.gen.throw(typ, value, traceback) [ 766.485967] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 766.485967] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] raise exception.ResourceProviderSyncFailed() [ 766.485967] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
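The retrying.py and six.reraise frames in each traceback come from a retry decorator wrapped around _update_to_placement; exceptions its predicate does not recognize as retriable are re-raised unchanged, so this schema 400 surfaces on every new claim instead of being retried away. Below is a minimal sketch of that pattern using the same 'retrying' library; the predicate and limits are illustrative assumptions, not Nova's actual settings.

    from retrying import retry

    class ResourceProviderUpdateConflict(Exception):
        """Transient conflict from placement; worth retrying."""

    class ResourceProviderSyncFailed(Exception):
        """What update_from_provider_tree raised above; not retriable."""

    def _is_conflict(exc):
        return isinstance(exc, ResourceProviderUpdateConflict)

    @retry(retry_on_exception=_is_conflict,
           stop_max_attempt_number=4, wait_fixed=1000)
    def _update_to_placement():
        # Stand-in for reportclient.update_from_provider_tree(); the schema
        # 400 ends up here as ResourceProviderSyncFailed.
        raise ResourceProviderSyncFailed()

    try:
        _update_to_placement()
    except ResourceProviderSyncFailed:
        print("not retried; the instance claim fails")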
[ 766.485967] env[69367]: ERROR nova.compute.manager [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] [ 766.487399] env[69367]: DEBUG nova.compute.utils [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 766.487399] env[69367]: DEBUG oslo_concurrency.lockutils [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.997s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 766.488992] env[69367]: INFO nova.compute.claims [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 766.492280] env[69367]: DEBUG nova.compute.manager [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] Build of instance 40e49f7b-e5f7-4673-a764-d8cec8a3cf18 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 766.492707] env[69367]: DEBUG nova.compute.manager [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 766.492934] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] Acquiring lock "refresh_cache-40e49f7b-e5f7-4673-a764-d8cec8a3cf18" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.493115] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] Acquired lock "refresh_cache-40e49f7b-e5f7-4673-a764-d8cec8a3cf18" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 766.494042] env[69367]: DEBUG nova.network.neutron [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 766.527641] env[69367]: INFO nova.compute.manager [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] [instance: 
e4db7bcc-26dd-4f0d-80da-655a58c80783] Took 1.02 seconds to deallocate network for instance. [ 766.914359] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.015925] env[69367]: DEBUG nova.network.neutron [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 767.125067] env[69367]: DEBUG nova.network.neutron [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.510306] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquiring lock "e1c7d100-4ad7-4871-970f-bb7562bfc6fc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.510769] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Lock "e1c7d100-4ad7-4871-970f-bb7562bfc6fc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.511134] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquiring lock "e1c7d100-4ad7-4871-970f-bb7562bfc6fc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.513020] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Lock "e1c7d100-4ad7-4871-970f-bb7562bfc6fc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.513020] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Lock "e1c7d100-4ad7-4871-970f-bb7562bfc6fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 767.514438] env[69367]: DEBUG nova.scheduler.client.report [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 767.517162] env[69367]: INFO nova.compute.manager [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Terminating instance [ 767.535633] env[69367]: DEBUG nova.scheduler.client.report [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 767.535633] env[69367]: DEBUG nova.compute.provider_tree [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 767.562143] env[69367]: DEBUG nova.scheduler.client.report [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 767.566309] env[69367]: INFO nova.scheduler.client.report [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] Deleted allocations for instance e4db7bcc-26dd-4f0d-80da-655a58c80783 [ 767.584243] env[69367]: DEBUG nova.scheduler.client.report [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 767.629601] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5c130717-e658-4600-96f7-a983167af7af 
tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] Releasing lock "refresh_cache-40e49f7b-e5f7-4673-a764-d8cec8a3cf18" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 767.629859] env[69367]: DEBUG nova.compute.manager [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 767.630074] env[69367]: DEBUG nova.compute.manager [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 767.630252] env[69367]: DEBUG nova.network.neutron [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 767.657134] env[69367]: DEBUG nova.network.neutron [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 767.955620] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquiring lock "69d2e230-1c19-4a76-a517-ee7c77854f5c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.956029] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "69d2e230-1c19-4a76-a517-ee7c77854f5c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 768.027325] env[69367]: DEBUG nova.compute.manager [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Start destroying the instance on the hypervisor. 
{{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 768.027532] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 768.029414] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41dc394a-8c53-4908-b55f-eb7c48908803 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.037537] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 768.037800] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e1628380-142b-4153-9812-aad69f0eef5f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.048426] env[69367]: DEBUG oslo_vmware.api [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Waiting for the task: (returnval){ [ 768.048426] env[69367]: value = "task-4233967" [ 768.048426] env[69367]: _type = "Task" [ 768.048426] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.062555] env[69367]: DEBUG oslo_vmware.api [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233967, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.079910] env[69367]: DEBUG oslo_concurrency.lockutils [None req-05f963a1-2d97-4edd-9c3d-11be8b7b68ec tempest-ServerAddressesNegativeTestJSON-302550741 tempest-ServerAddressesNegativeTestJSON-302550741-project-member] Lock "e4db7bcc-26dd-4f0d-80da-655a58c80783" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.696s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 768.160028] env[69367]: DEBUG nova.network.neutron [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.168792] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766157fb-9a51-4ac1-8e48-4476e5caf6f7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.177847] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62435ac4-0071-4193-b385-afe4a2394d87 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.210654] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b940e4-eb40-4b0d-9326-7e29aba05b8e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.219313] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab5cf0da-55ac-4e1c-8a40-6f39a49a1b57 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.237762] env[69367]: DEBUG nova.compute.provider_tree [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 768.562170] env[69367]: DEBUG oslo_vmware.api [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233967, 'name': PowerOffVM_Task, 'duration_secs': 0.225191} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.562471] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 768.562637] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 768.562893] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-57eb4ce4-6b79-495d-8ce2-acfcb98aa221 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.583449] env[69367]: DEBUG nova.compute.manager [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 768.631114] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 768.631381] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 768.632769] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Deleting the datastore file [datastore2] e1c7d100-4ad7-4871-970f-bb7562bfc6fc {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 768.632769] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-70977d41-ced4-43bf-8f38-f77cb72f3fdc {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.640726] env[69367]: DEBUG oslo_vmware.api [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Waiting for the task: (returnval){ [ 768.640726] env[69367]: value = "task-4233969" [ 768.640726] env[69367]: _type = "Task" [ 768.640726] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.652372] env[69367]: DEBUG oslo_vmware.api [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233969, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.663091] env[69367]: INFO nova.compute.manager [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] [instance: 40e49f7b-e5f7-4673-a764-d8cec8a3cf18] Took 1.03 seconds to deallocate network for instance. [ 768.762064] env[69367]: ERROR nova.scheduler.client.report [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] [req-1584541d-ceb0-41ad-be3a-94053efe99c3] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-1584541d-ceb0-41ad-be3a-94053efe99c3"}]} [ 768.762539] env[69367]: DEBUG oslo_concurrency.lockutils [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.276s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 768.763243] env[69367]: ERROR nova.compute.manager [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
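The 400 above quotes placement's inventory schema: max_unit for any resource class must be an integer between 1 and 2147483647, and the DISK_GB inventory being PUT carries max_unit 0, so the request is rejected before it reaches the database. The log does not show why max_unit dropped to 0; a plausible but unconfirmed explanation is that the free datastore space is floored to whole GiB, which yields 0 once the datastore is nearly full. The rejection itself can be reproduced with jsonschema against a fragment reconstructed from the error detail (abridged here, not copied from placement's source):

    # Reproduces the validation failure quoted in the 400 response.
    # Schema fragment reconstructed from the error detail, abridged.
    import jsonschema

    INVENTORY_SCHEMA = {
        "type": "object",
        "properties": {
            "inventories": {
                "type": "object",
                "patternProperties": {
                    "^[A-Z0-9_]+$": {
                        "type": "object",
                        "properties": {
                            "max_unit": {
                                "type": "integer",
                                "minimum": 1,
                                "maximum": 2147483647,
                            },
                        },
                    },
                },
            },
        },
    }

    payload = {
        "inventories": {
            # DISK_GB inventory as reported by the resource tracker above.
            "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1,
                        "max_unit": 0, "step_size": 1, "allocation_ratio": 1.0},
        },
    }

    try:
        jsonschema.validate(payload, INVENTORY_SCHEMA)
    except jsonschema.ValidationError as e:
        print(e.message)  # "0 is less than the minimum of 1"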
[ 768.763243] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] Traceback (most recent call last): [ 768.763243] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 768.763243] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] yield [ 768.763243] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 768.763243] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] self.set_inventory_for_provider( [ 768.763243] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 768.763243] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 768.763473] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-1584541d-ceb0-41ad-be3a-94053efe99c3"}]} [ 768.763473] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] [ 768.763473] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] During handling of the above exception, another exception occurred: [ 768.763473] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] [ 768.763473] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] Traceback (most recent call last): [ 768.763473] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 768.763473] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] with self.rt.instance_claim(context, instance, node, allocs, [ 768.763473] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 768.763473] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] return f(*args, **kwargs) [ 768.763727] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 768.763727] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] self._update(elevated, cn) [ 768.763727] env[69367]: ERROR nova.compute.manager [instance: 
d6c2606d-0c6c-4add-b6f5-8229c21b56be] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 768.763727] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] self._update_to_placement(context, compute_node, startup) [ 768.763727] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 768.763727] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 768.763727] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 768.763727] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] return attempt.get(self._wrap_exception) [ 768.763727] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 768.763727] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] six.reraise(self.value[0], self.value[1], self.value[2]) [ 768.763727] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 768.763727] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] raise value [ 768.763727] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 768.764231] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 768.764231] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 768.764231] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] self.reportclient.update_from_provider_tree( [ 768.764231] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 768.764231] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] with catch_all(pd.uuid): [ 768.764231] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 768.764231] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] self.gen.throw(typ, value, traceback) [ 768.764231] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 768.764231] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] raise exception.ResourceProviderSyncFailed() [ 768.764231] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 768.764231] env[69367]: ERROR nova.compute.manager [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] [ 768.764591] env[69367]: DEBUG nova.compute.utils [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 768.766753] env[69367]: DEBUG nova.compute.manager [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] Build of instance d6c2606d-0c6c-4add-b6f5-8229c21b56be was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 768.767261] env[69367]: DEBUG nova.compute.manager [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 768.767580] env[69367]: DEBUG oslo_concurrency.lockutils [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] Acquiring lock "refresh_cache-d6c2606d-0c6c-4add-b6f5-8229c21b56be" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.767795] env[69367]: DEBUG oslo_concurrency.lockutils [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] Acquired lock "refresh_cache-d6c2606d-0c6c-4add-b6f5-8229c21b56be" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 768.768039] env[69367]: DEBUG nova.network.neutron [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 768.769824] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.203s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 768.771657] env[69367]: INFO nova.compute.claims [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 769.113044] env[69367]: DEBUG oslo_concurrency.lockutils [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 769.151949] env[69367]: DEBUG oslo_vmware.api [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Task: {'id': task-4233969, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165748} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.152856] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 769.153092] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 769.154141] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 769.154141] env[69367]: INFO nova.compute.manager [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Took 1.13 seconds to destroy the instance on the hypervisor. [ 769.154141] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 769.154141] env[69367]: DEBUG nova.compute.manager [-] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 769.154141] env[69367]: DEBUG nova.network.neutron [-] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 769.318029] env[69367]: DEBUG nova.network.neutron [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 769.528359] env[69367]: DEBUG nova.network.neutron [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.669533] env[69367]: DEBUG nova.compute.manager [req-428403d2-ca5c-4810-b6c6-30cb1577b76a req-f94c0a1d-98bb-4534-b50f-fccb2a042f4a service nova] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Received event network-vif-deleted-013efad5-0b57-43e9-b662-10e31d24d8af {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 769.669683] env[69367]: INFO nova.compute.manager [req-428403d2-ca5c-4810-b6c6-30cb1577b76a req-f94c0a1d-98bb-4534-b50f-fccb2a042f4a service nova] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Neutron deleted interface 013efad5-0b57-43e9-b662-10e31d24d8af; detaching it from the instance and deleting it from the info cache [ 769.669843] env[69367]: DEBUG nova.network.neutron [req-428403d2-ca5c-4810-b6c6-30cb1577b76a req-f94c0a1d-98bb-4534-b50f-fccb2a042f4a service nova] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.706553] env[69367]: INFO nova.scheduler.client.report [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] Deleted allocations for instance 40e49f7b-e5f7-4673-a764-d8cec8a3cf18 [ 769.800378] env[69367]: DEBUG nova.scheduler.client.report [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 769.815611] env[69367]: DEBUG nova.scheduler.client.report [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 769.815779] env[69367]: DEBUG nova.compute.provider_tree [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 769.829793] env[69367]: DEBUG nova.scheduler.client.report [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 769.849023] env[69367]: DEBUG nova.scheduler.client.report [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 770.032447] env[69367]: DEBUG oslo_concurrency.lockutils [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] Releasing lock "refresh_cache-d6c2606d-0c6c-4add-b6f5-8229c21b56be" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 770.032643] env[69367]: DEBUG nova.compute.manager [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 770.033441] env[69367]: DEBUG nova.compute.manager [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 770.033863] env[69367]: DEBUG nova.network.neutron [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 770.062804] env[69367]: DEBUG nova.network.neutron [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 770.140783] env[69367]: DEBUG nova.network.neutron [-] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.173275] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5cda99d4-9dcc-4e19-a79d-dceb9866ad7c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.184499] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-987d6f0d-530a-4a03-9e36-a2ad912efffb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.221011] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5c130717-e658-4600-96f7-a983167af7af tempest-ServersTestFqdnHostnames-2115646556 tempest-ServersTestFqdnHostnames-2115646556-project-member] Lock "40e49f7b-e5f7-4673-a764-d8cec8a3cf18" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.552s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.221400] env[69367]: DEBUG nova.compute.manager [req-428403d2-ca5c-4810-b6c6-30cb1577b76a req-f94c0a1d-98bb-4534-b50f-fccb2a042f4a service nova] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Detach interface failed, port_id=013efad5-0b57-43e9-b662-10e31d24d8af, reason: Instance e1c7d100-4ad7-4871-970f-bb7562bfc6fc could not be found. {{(pid=69367) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11601}} [ 770.365682] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2b62d89-b7f7-4b94-a8cb-344bd39d1d9d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.374666] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe4105e-0f30-48ba-ad48-fb1fc91522ea {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.414248] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5634fbb1-0fd6-49a1-8c6f-373bb0a4bc73 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.422735] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5813407-36a0-41a1-b62f-0c62d9504cd9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.437329] env[69367]: DEBUG nova.compute.provider_tree [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 770.565743] env[69367]: DEBUG nova.network.neutron [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] Updating instance_info_cache with network_info: [] 
{{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.646274] env[69367]: INFO nova.compute.manager [-] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Took 1.49 seconds to deallocate network for instance. [ 770.726608] env[69367]: DEBUG nova.compute.manager [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 770.945028] env[69367]: DEBUG nova.scheduler.client.report [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 771.071918] env[69367]: INFO nova.compute.manager [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] [instance: d6c2606d-0c6c-4add-b6f5-8229c21b56be] Took 1.03 seconds to deallocate network for instance. [ 771.159527] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 771.260277] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 771.448933] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.678s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.448933] env[69367]: DEBUG nova.compute.manager [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Start building networks asynchronously for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 771.451710] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.855s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 771.451899] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.459638] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.756s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 771.928193] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-068770a3-7b9e-4da0-9e68-a8c79d10693a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.938173] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-535127e8-30c2-4a39-b446-f9b25305fabd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.975732] env[69367]: DEBUG nova.compute.utils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 771.978431] env[69367]: DEBUG nova.compute.manager [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Allocating IP information in the background. 
{{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 771.978630] env[69367]: DEBUG nova.network.neutron [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 771.980831] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1155152-bc7f-4e68-b439-b0f28a8faf6c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.983807] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2fa12bdd-aebb-46e6-a529-1cfa37dcf410 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 49.826s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.984711] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a1156e1e-41ad-40b1-a520-164694ed7948 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 27.667s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 771.986228] env[69367]: INFO nova.compute.manager [None req-a1156e1e-41ad-40b1-a520-164694ed7948 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Unshelving [ 771.998248] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cf71019-e833-4912-9355-2d4dc2067b10 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.014303] env[69367]: DEBUG nova.compute.provider_tree [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 772.109584] env[69367]: INFO nova.scheduler.client.report [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] Deleted allocations for instance d6c2606d-0c6c-4add-b6f5-8229c21b56be [ 772.293175] env[69367]: DEBUG nova.policy [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3a7bbf03595642c3b42cc5e9f5b79bc0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9c8ac08a704e476fbe794f66f61e27a5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize 
/opt/stack/nova/nova/policy.py:192}} [ 772.486750] env[69367]: DEBUG nova.compute.manager [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Start building block device mappings for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 772.517274] env[69367]: DEBUG nova.scheduler.client.report [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 772.621040] env[69367]: DEBUG oslo_concurrency.lockutils [None req-abe26bce-ad1a-4a45-984f-937494cbba7c tempest-ServersTestJSON-1126084006 tempest-ServersTestJSON-1126084006-project-member] Lock "d6c2606d-0c6c-4add-b6f5-8229c21b56be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.846s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 772.751066] env[69367]: DEBUG nova.network.neutron [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Successfully created port: 12a5e1ba-1f50-4421-a715-d2e4cbb74000 {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 773.008868] env[69367]: DEBUG nova.compute.utils [None req-a1156e1e-41ad-40b1-a520-164694ed7948 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 773.023404] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.563s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 773.023747] env[69367]: INFO nova.compute.manager [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] [instance: 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57] Successfully reverted task state from None on failure for instance. 
[ 773.027342] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.291s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 773.034351] env[69367]: INFO nova.compute.claims [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 773.038018] env[69367]: ERROR oslo_messaging.rpc.server [None req-b4416836-0f76-4b31-aa99-b0fec3a75acb tempest-ServerMetadataTestJSON-698858 tempest-ServerMetadataTestJSON-698858-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 773.038018] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 773.038018] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 773.038018] env[69367]: ERROR oslo_messaging.rpc.server yield [ 773.038018] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 773.038018] env[69367]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 773.038018] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 773.038018] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 773.038018] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-197fdfd3-1285-4f7a-a5a2-854dc64a12d5"}]} [ 773.038018] env[69367]: ERROR oslo_messaging.rpc.server [ 773.038953] env[69367]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 773.038953] env[69367]: ERROR oslo_messaging.rpc.server [ 773.038953] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 773.038953] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 773.038953] env[69367]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 773.038953] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 773.038953] env[69367]: ERROR 
oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 773.038953] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 773.038953] env[69367]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 773.038953] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 773.038953] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 773.038953] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 773.038953] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 773.038953] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 773.038953] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 773.038953] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 773.038953] env[69367]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 773.038953] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 773.039408] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 773.039408] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 773.039408] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 773.039408] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 773.039408] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 773.039408] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 773.039408] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 773.039408] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 773.039408] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 773.039408] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 773.039408] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 773.039408] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 773.039408] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 773.039408] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 773.039408] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 773.039408] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 773.039408] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 773.039408] env[69367]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 773.039855] env[69367]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 773.039855] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 773.039855] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 773.039855] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 773.039855] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 773.039855] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 773.039855] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 773.039855] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 773.039855] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 773.039855] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 773.039855] env[69367]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 773.039855] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 773.039855] env[69367]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 773.039855] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 773.039855] env[69367]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 773.039855] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 773.039855] env[69367]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 773.039855] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 773.040424] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 773.040424] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 773.040424] env[69367]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 773.040424] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 773.040424] env[69367]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 773.040424] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 773.040424] env[69367]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 773.040424] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 773.040424] env[69367]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 773.040424] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 
773.040424] env[69367]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 773.040424] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 773.040424] env[69367]: ERROR oslo_messaging.rpc.server raise value [ 773.040424] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 773.040424] env[69367]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 773.040424] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 773.040424] env[69367]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 773.040841] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 773.040841] env[69367]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 773.040841] env[69367]: ERROR oslo_messaging.rpc.server File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 773.040841] env[69367]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 773.040841] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 773.040841] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 773.040841] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 773.040841] env[69367]: ERROR oslo_messaging.rpc.server [ 773.126113] env[69367]: DEBUG nova.compute.manager [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 773.507777] env[69367]: DEBUG nova.compute.manager [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Start spawning the instance on the hypervisor. 
{{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 773.515151] env[69367]: INFO nova.virt.block_device [None req-a1156e1e-41ad-40b1-a520-164694ed7948 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Booting with volume ad137bb7-0767-4b92-8013-b1f28493e426 at /dev/sdb [ 773.540777] env[69367]: DEBUG nova.virt.hardware [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 773.541781] env[69367]: DEBUG nova.virt.hardware [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 773.542181] env[69367]: DEBUG nova.virt.hardware [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 773.542511] env[69367]: DEBUG nova.virt.hardware [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 773.542777] env[69367]: DEBUG nova.virt.hardware [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 773.543379] env[69367]: DEBUG nova.virt.hardware [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 773.544141] env[69367]: DEBUG nova.virt.hardware [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 773.544545] env[69367]: DEBUG nova.virt.hardware 
[None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 773.545761] env[69367]: DEBUG nova.virt.hardware [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 773.548034] env[69367]: DEBUG nova.virt.hardware [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 773.548034] env[69367]: DEBUG nova.virt.hardware [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 773.551640] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a986e8fb-072c-4c79-accb-1a2b47f5d795 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.558908] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a47e911b-502c-4556-9489-830580c86608 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.565450] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6189a2e-5c5f-474d-986c-791c2c8bc4e6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.578778] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94461ee7-6693-4796-9b7d-38963634ff13 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.624271] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2acc9d4a-e456-4dc7-a7a0-7721199069ec {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.636223] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4817d878-9805-430b-b153-905d01c341c3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.668657] env[69367]: DEBUG oslo_concurrency.lockutils [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 773.669510] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9163cc4c-2b13-4a05-9b65-c1658e82e636 {{(pid=69367) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.677286] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75614b59-9109-40ce-9fa4-768e73113830 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.696994] env[69367]: DEBUG nova.virt.block_device [None req-a1156e1e-41ad-40b1-a520-164694ed7948 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Updating existing volume attachment record: 4eaa7eef-873d-4245-a140-ce37e4a14adc {{(pid=69367) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 774.371584] env[69367]: DEBUG nova.compute.manager [req-38114c36-942b-4f03-9ba0-e661a15c5471 req-ca1a9c69-6fa8-4888-99dd-3f3f893a21e9 service nova] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Received event network-vif-plugged-12a5e1ba-1f50-4421-a715-d2e4cbb74000 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 774.371882] env[69367]: DEBUG oslo_concurrency.lockutils [req-38114c36-942b-4f03-9ba0-e661a15c5471 req-ca1a9c69-6fa8-4888-99dd-3f3f893a21e9 service nova] Acquiring lock "7f937d89-684b-44f5-9f30-783aeafe99d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.372321] env[69367]: DEBUG oslo_concurrency.lockutils [req-38114c36-942b-4f03-9ba0-e661a15c5471 req-ca1a9c69-6fa8-4888-99dd-3f3f893a21e9 service nova] Lock "7f937d89-684b-44f5-9f30-783aeafe99d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.372492] env[69367]: DEBUG oslo_concurrency.lockutils [req-38114c36-942b-4f03-9ba0-e661a15c5471 req-ca1a9c69-6fa8-4888-99dd-3f3f893a21e9 service nova] Lock "7f937d89-684b-44f5-9f30-783aeafe99d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.372802] env[69367]: DEBUG nova.compute.manager [req-38114c36-942b-4f03-9ba0-e661a15c5471 req-ca1a9c69-6fa8-4888-99dd-3f3f893a21e9 service nova] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] No waiting events found dispatching network-vif-plugged-12a5e1ba-1f50-4421-a715-d2e4cbb74000 {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 774.373210] env[69367]: WARNING nova.compute.manager [req-38114c36-942b-4f03-9ba0-e661a15c5471 req-ca1a9c69-6fa8-4888-99dd-3f3f893a21e9 service nova] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Received unexpected event network-vif-plugged-12a5e1ba-1f50-4421-a715-d2e4cbb74000 for instance with vm_state building and task_state spawning. 
[ 774.579533] env[69367]: DEBUG nova.network.neutron [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Successfully updated port: 12a5e1ba-1f50-4421-a715-d2e4cbb74000 {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 774.606414] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9040d718-f0fd-4650-ab7d-b9121f2c2732 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.617044] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f25dcf50-b5f7-44b5-aada-d091fdd24c21 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.655058] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eef5e7c-c961-4a2f-95ad-8122ecb1c1d3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.663913] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab6bcf68-5e6b-4a42-9368-6407467b6b17 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.679136] env[69367]: DEBUG nova.compute.provider_tree [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 775.081855] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "refresh_cache-7f937d89-684b-44f5-9f30-783aeafe99d1" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.082131] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquired lock "refresh_cache-7f937d89-684b-44f5-9f30-783aeafe99d1" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 775.082377] env[69367]: DEBUG nova.network.neutron [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 775.184937] env[69367]: DEBUG nova.scheduler.client.report [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 775.632527] env[69367]: DEBUG nova.network.neutron [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 775.691140] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.664s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 775.691684] env[69367]: DEBUG nova.compute.manager [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Start building networks asynchronously for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 775.701425] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.164s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 775.705399] env[69367]: INFO nova.compute.claims [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 775.909500] env[69367]: DEBUG nova.network.neutron [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Updating instance_info_cache with network_info: [{"id": "12a5e1ba-1f50-4421-a715-d2e4cbb74000", "address": "fa:16:3e:da:51:45", "network": {"id": "e89e8083-43a5-4e6e-aa58-98ecffb55d24", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-76221324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c8ac08a704e476fbe794f66f61e27a5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12a5e1ba-1f", "ovs_interfaceid": "12a5e1ba-1f50-4421-a715-d2e4cbb74000", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.210511] env[69367]: DEBUG nova.compute.utils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 776.216685] env[69367]: DEBUG nova.compute.manager [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Allocating IP information in the background. {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 776.216881] env[69367]: DEBUG nova.network.neutron [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 776.285777] env[69367]: DEBUG nova.policy [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3a7bbf03595642c3b42cc5e9f5b79bc0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9c8ac08a704e476fbe794f66f61e27a5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 776.404294] env[69367]: DEBUG nova.compute.manager [req-8d649e5f-85e2-42ff-8d2e-6fb939fc8de6 req-1ebd328b-08e8-4547-8f1d-4e4314cf9849 service nova] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Received event network-changed-12a5e1ba-1f50-4421-a715-d2e4cbb74000 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 776.404294] env[69367]: DEBUG nova.compute.manager [req-8d649e5f-85e2-42ff-8d2e-6fb939fc8de6 req-1ebd328b-08e8-4547-8f1d-4e4314cf9849 service nova] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Refreshing instance network info cache due to event network-changed-12a5e1ba-1f50-4421-a715-d2e4cbb74000. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 776.404294] env[69367]: DEBUG oslo_concurrency.lockutils [req-8d649e5f-85e2-42ff-8d2e-6fb939fc8de6 req-1ebd328b-08e8-4547-8f1d-4e4314cf9849 service nova] Acquiring lock "refresh_cache-7f937d89-684b-44f5-9f30-783aeafe99d1" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.417532] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Releasing lock "refresh_cache-7f937d89-684b-44f5-9f30-783aeafe99d1" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 776.418404] env[69367]: DEBUG nova.compute.manager [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Instance network_info: |[{"id": "12a5e1ba-1f50-4421-a715-d2e4cbb74000", "address": "fa:16:3e:da:51:45", "network": {"id": "e89e8083-43a5-4e6e-aa58-98ecffb55d24", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-76221324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c8ac08a704e476fbe794f66f61e27a5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12a5e1ba-1f", "ovs_interfaceid": "12a5e1ba-1f50-4421-a715-d2e4cbb74000", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 776.418817] env[69367]: DEBUG oslo_concurrency.lockutils [req-8d649e5f-85e2-42ff-8d2e-6fb939fc8de6 req-1ebd328b-08e8-4547-8f1d-4e4314cf9849 service nova] Acquired lock "refresh_cache-7f937d89-684b-44f5-9f30-783aeafe99d1" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.420069] env[69367]: DEBUG nova.network.neutron [req-8d649e5f-85e2-42ff-8d2e-6fb939fc8de6 req-1ebd328b-08e8-4547-8f1d-4e4314cf9849 service nova] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Refreshing network info cache for port 12a5e1ba-1f50-4421-a715-d2e4cbb74000 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 776.420431] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:da:51:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d4ef133-b6f3-41d1-add4-92a1482195cf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'12a5e1ba-1f50-4421-a715-d2e4cbb74000', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 776.429461] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 776.430245] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 776.430513] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a8c96daa-0863-4a75-a66a-0586573b017f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.452704] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 776.452704] env[69367]: value = "task-4233974" [ 776.452704] env[69367]: _type = "Task" [ 776.452704] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.461729] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233974, 'name': CreateVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.607124] env[69367]: DEBUG nova.network.neutron [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Successfully created port: 1da2a1bd-4886-4d63-82a9-cd4fdd2c31d5 {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 776.718221] env[69367]: DEBUG nova.compute.manager [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Start building block device mappings for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 776.963308] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233974, 'name': CreateVM_Task, 'duration_secs': 0.311254} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.963492] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 776.964286] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.964374] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.964686] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 776.964939] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed5903ca-08d8-4d88-a6ba-266f5694c2e8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.973398] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 776.973398] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]5229f1c8-e6f9-5d6a-b5a1-ef2ffa8d3065" [ 776.973398] env[69367]: _type = "Task" [ 776.973398] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.986104] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5229f1c8-e6f9-5d6a-b5a1-ef2ffa8d3065, 'name': SearchDatastore_Task, 'duration_secs': 0.011505} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.986468] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 776.986794] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 776.987318] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.987523] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.987755] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 776.988303] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8c01e5d2-364b-4d8b-aba1-f08cc0976b54 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.001225] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 777.001423] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 777.002250] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9623918-689b-45ff-a180-10cbb6d55013 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.011268] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 777.011268] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]525d1ce4-7a8b-ee39-e262-a4b614ac3baf" [ 777.011268] env[69367]: _type = "Task" [ 777.011268] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.023439] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]525d1ce4-7a8b-ee39-e262-a4b614ac3baf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.249547] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d07477a-3d62-4014-bf8c-152e649f23be {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.260293] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af16f18-a3f9-46d7-be83-a53116d9e4cf {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.296751] env[69367]: DEBUG nova.network.neutron [req-8d649e5f-85e2-42ff-8d2e-6fb939fc8de6 req-1ebd328b-08e8-4547-8f1d-4e4314cf9849 service nova] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Updated VIF entry in instance network info cache for port 12a5e1ba-1f50-4421-a715-d2e4cbb74000. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 777.297156] env[69367]: DEBUG nova.network.neutron [req-8d649e5f-85e2-42ff-8d2e-6fb939fc8de6 req-1ebd328b-08e8-4547-8f1d-4e4314cf9849 service nova] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Updating instance_info_cache with network_info: [{"id": "12a5e1ba-1f50-4421-a715-d2e4cbb74000", "address": "fa:16:3e:da:51:45", "network": {"id": "e89e8083-43a5-4e6e-aa58-98ecffb55d24", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-76221324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c8ac08a704e476fbe794f66f61e27a5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12a5e1ba-1f", "ovs_interfaceid": "12a5e1ba-1f50-4421-a715-d2e4cbb74000", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.299030] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12dde894-4176-44c5-94c3-955eb0a3587b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.308539] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-947f15d9-abae-47eb-8e32-ba9d46e35fab {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.323519] env[69367]: DEBUG nova.compute.provider_tree [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 777.533273] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]525d1ce4-7a8b-ee39-e262-a4b614ac3baf, 'name': SearchDatastore_Task, 'duration_secs': 0.009876} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.534313] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afc08b1e-12b3-427b-bbeb-3033bed7c9ce {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.540984] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 777.540984] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]5277149e-228a-142a-f17d-2866a49a8d3a" [ 777.540984] env[69367]: _type = "Task" [ 777.540984] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.549910] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5277149e-228a-142a-f17d-2866a49a8d3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.736355] env[69367]: DEBUG nova.compute.manager [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Start spawning the instance on the hypervisor. {{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 777.761834] env[69367]: DEBUG nova.virt.hardware [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 777.762037] env[69367]: DEBUG nova.virt.hardware [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 777.762217] env[69367]: DEBUG nova.virt.hardware [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 777.762391] env[69367]: DEBUG nova.virt.hardware [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e 
tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 777.762538] env[69367]: DEBUG nova.virt.hardware [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 777.762684] env[69367]: DEBUG nova.virt.hardware [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 777.762892] env[69367]: DEBUG nova.virt.hardware [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 777.763069] env[69367]: DEBUG nova.virt.hardware [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 777.763243] env[69367]: DEBUG nova.virt.hardware [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 777.763409] env[69367]: DEBUG nova.virt.hardware [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 777.763583] env[69367]: DEBUG nova.virt.hardware [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 777.764516] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b514d79-0be3-4eda-99be-244d59ff09cf {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.772926] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1e2b242-e44b-4f36-9986-444bfe0947f0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.803040] env[69367]: DEBUG oslo_concurrency.lockutils [req-8d649e5f-85e2-42ff-8d2e-6fb939fc8de6 req-1ebd328b-08e8-4547-8f1d-4e4314cf9849 service nova] Releasing lock "refresh_cache-7f937d89-684b-44f5-9f30-783aeafe99d1" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 777.826937] env[69367]: DEBUG 
nova.scheduler.client.report [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 778.051648] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5277149e-228a-142a-f17d-2866a49a8d3a, 'name': SearchDatastore_Task, 'duration_secs': 0.01054} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.052049] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 778.052396] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 7f937d89-684b-44f5-9f30-783aeafe99d1/7f937d89-684b-44f5-9f30-783aeafe99d1.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 778.052513] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6a326ccb-f778-419e-b582-3cb4771dd04f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.061241] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 778.061241] env[69367]: value = "task-4233975" [ 778.061241] env[69367]: _type = "Task" [ 778.061241] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.070665] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233975, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.097502] env[69367]: DEBUG oslo_concurrency.lockutils [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquiring lock "48470f96-56d2-4ca2-8078-c5ff4f6db71b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.097827] env[69367]: DEBUG oslo_concurrency.lockutils [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "48470f96-56d2-4ca2-8078-c5ff4f6db71b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.217337] env[69367]: DEBUG nova.network.neutron [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Successfully updated port: 1da2a1bd-4886-4d63-82a9-cd4fdd2c31d5 {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 778.332285] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.631s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.333249] env[69367]: DEBUG nova.compute.manager [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Start building networks asynchronously for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 778.340141] env[69367]: DEBUG oslo_concurrency.lockutils [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.558s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.340141] env[69367]: INFO nova.compute.claims [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 778.457566] env[69367]: DEBUG nova.compute.manager [req-4aaa3405-443a-421a-b5a3-c7b7df5f15a8 req-189eba65-7e9e-4e4d-9d36-bb577b602355 service nova] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Received event network-vif-plugged-1da2a1bd-4886-4d63-82a9-cd4fdd2c31d5 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 778.457798] env[69367]: DEBUG oslo_concurrency.lockutils [req-4aaa3405-443a-421a-b5a3-c7b7df5f15a8 req-189eba65-7e9e-4e4d-9d36-bb577b602355 service nova] Acquiring lock "ab9d8e3e-65c5-4ac9-920f-3042b8cf2054-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.458509] env[69367]: DEBUG oslo_concurrency.lockutils [req-4aaa3405-443a-421a-b5a3-c7b7df5f15a8 req-189eba65-7e9e-4e4d-9d36-bb577b602355 service nova] Lock "ab9d8e3e-65c5-4ac9-920f-3042b8cf2054-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.458509] env[69367]: DEBUG oslo_concurrency.lockutils [req-4aaa3405-443a-421a-b5a3-c7b7df5f15a8 req-189eba65-7e9e-4e4d-9d36-bb577b602355 service nova] Lock "ab9d8e3e-65c5-4ac9-920f-3042b8cf2054-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.458509] env[69367]: DEBUG nova.compute.manager [req-4aaa3405-443a-421a-b5a3-c7b7df5f15a8 req-189eba65-7e9e-4e4d-9d36-bb577b602355 service nova] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] No waiting events found dispatching network-vif-plugged-1da2a1bd-4886-4d63-82a9-cd4fdd2c31d5 {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 778.458781] env[69367]: WARNING nova.compute.manager [req-4aaa3405-443a-421a-b5a3-c7b7df5f15a8 req-189eba65-7e9e-4e4d-9d36-bb577b602355 service nova] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Received unexpected event network-vif-plugged-1da2a1bd-4886-4d63-82a9-cd4fdd2c31d5 for instance with vm_state building and task_state spawning. 
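The entries above trace Nova's external-event handshake for port 1da2a1bd-4886-4d63-82a9-cd4fdd2c31d5 on instance ab9d8e3e-65c5-4ac9-920f-3042b8cf2054: Neutron reports network-vif-plugged once the port is bound, the compute manager takes the per-instance "-events" lock and looks for a registered waiter, finds none because the spawn path has not yet registered one, logs the event as unexpected, and the follow-up network-changed event drives a refresh of the instance network info cache. The register-then-pop pattern behind pop_instance_event can be sketched as below; the class and helper names are simplified stand-ins for illustration, not the actual Nova implementation.

import threading
from collections import defaultdict

class InstanceEvents:
    # Simplified stand-in for the waiter registry consulted by pop_instance_event.
    def __init__(self):
        self._lock = threading.Lock()
        # instance_uuid -> {event_name: threading.Event}
        self._waiters = defaultdict(dict)

    def prepare(self, instance_uuid, event_name):
        # A spawn/plug path that expects e.g. 'network-vif-plugged-<port_id>'
        # registers a waiter *before* starting the operation that triggers it.
        waiter = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = waiter
        return waiter

    def pop_instance_event(self, instance_uuid, event_name):
        # Called from the external-event handler; returns the waiter if one was
        # registered, otherwise None -- the "unexpected event" case in the log.
        with self._lock:
            return self._waiters[instance_uuid].pop(event_name, None)

def handle_external_event(registry, instance_uuid, event_name):
    waiter = registry.pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        print("WARNING: unexpected event %s for %s" % (event_name, instance_uuid))
    else:
        waiter.set()  # unblocks the thread waiting for the VIF plug

In this trace no waiter was registered, so the warning branch fires and spawning simply continues; the event is informational rather than fatal.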
[ 778.458781] env[69367]: DEBUG nova.compute.manager [req-4aaa3405-443a-421a-b5a3-c7b7df5f15a8 req-189eba65-7e9e-4e4d-9d36-bb577b602355 service nova] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Received event network-changed-1da2a1bd-4886-4d63-82a9-cd4fdd2c31d5 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 778.459043] env[69367]: DEBUG nova.compute.manager [req-4aaa3405-443a-421a-b5a3-c7b7df5f15a8 req-189eba65-7e9e-4e4d-9d36-bb577b602355 service nova] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Refreshing instance network info cache due to event network-changed-1da2a1bd-4886-4d63-82a9-cd4fdd2c31d5. {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 778.459355] env[69367]: DEBUG oslo_concurrency.lockutils [req-4aaa3405-443a-421a-b5a3-c7b7df5f15a8 req-189eba65-7e9e-4e4d-9d36-bb577b602355 service nova] Acquiring lock "refresh_cache-ab9d8e3e-65c5-4ac9-920f-3042b8cf2054" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.459549] env[69367]: DEBUG oslo_concurrency.lockutils [req-4aaa3405-443a-421a-b5a3-c7b7df5f15a8 req-189eba65-7e9e-4e4d-9d36-bb577b602355 service nova] Acquired lock "refresh_cache-ab9d8e3e-65c5-4ac9-920f-3042b8cf2054" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 778.459728] env[69367]: DEBUG nova.network.neutron [req-4aaa3405-443a-421a-b5a3-c7b7df5f15a8 req-189eba65-7e9e-4e4d-9d36-bb577b602355 service nova] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Refreshing network info cache for port 1da2a1bd-4886-4d63-82a9-cd4fdd2c31d5 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 778.573643] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233975, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.720429] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "refresh_cache-ab9d8e3e-65c5-4ac9-920f-3042b8cf2054" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.845782] env[69367]: DEBUG nova.compute.utils [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 778.847424] env[69367]: DEBUG nova.compute.manager [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Allocating IP information in the background. 
{{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 778.847626] env[69367]: DEBUG nova.network.neutron [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 778.891792] env[69367]: DEBUG nova.policy [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9c09f4a51f124383a4f6fdb69330416d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd27807405a646e989b95325358a87eb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 779.072634] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233975, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.593869} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.072948] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 7f937d89-684b-44f5-9f30-783aeafe99d1/7f937d89-684b-44f5-9f30-783aeafe99d1.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 779.073171] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 779.073437] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2f2b75fb-5fe1-4570-9010-e9ca2de46997 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.080459] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 779.080459] env[69367]: value = "task-4233976" [ 779.080459] env[69367]: _type = "Task" [ 779.080459] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.092522] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233976, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.191012] env[69367]: DEBUG nova.network.neutron [req-4aaa3405-443a-421a-b5a3-c7b7df5f15a8 req-189eba65-7e9e-4e4d-9d36-bb577b602355 service nova] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 779.334735] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a1156e1e-41ad-40b1-a520-164694ed7948 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.351642] env[69367]: DEBUG nova.compute.manager [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Start building block device mappings for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 779.363097] env[69367]: DEBUG nova.network.neutron [req-4aaa3405-443a-421a-b5a3-c7b7df5f15a8 req-189eba65-7e9e-4e4d-9d36-bb577b602355 service nova] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.593092] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233976, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064411} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.595836] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 779.597029] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c158eae-fe9d-4637-b78f-a2f087b34bb7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.620713] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] 7f937d89-684b-44f5-9f30-783aeafe99d1/7f937d89-684b-44f5-9f30-783aeafe99d1.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 779.624157] env[69367]: DEBUG nova.network.neutron [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Successfully created port: cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1 {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 779.625877] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2bf02686-e954-480f-8089-86898c785dcf {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.647617] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 779.647617] env[69367]: value = "task-4233977" [ 779.647617] env[69367]: _type = "Task" [ 779.647617] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.658594] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233977, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.864834] env[69367]: DEBUG oslo_concurrency.lockutils [req-4aaa3405-443a-421a-b5a3-c7b7df5f15a8 req-189eba65-7e9e-4e4d-9d36-bb577b602355 service nova] Releasing lock "refresh_cache-ab9d8e3e-65c5-4ac9-920f-3042b8cf2054" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 779.865255] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquired lock "refresh_cache-ab9d8e3e-65c5-4ac9-920f-3042b8cf2054" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 779.865460] env[69367]: DEBUG nova.network.neutron [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 779.894730] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e39a6d40-ff92-4f1a-b1ab-a1cc2ee8f5e3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.905172] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f454432-0342-4550-bc4c-b83913210b72 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.939755] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b27c053c-e0e3-4c1b-a99e-eb06ae9733e3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.948456] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a533a881-bb58-4458-a2dd-d5d765feee84 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.966283] env[69367]: DEBUG nova.compute.provider_tree [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 780.158688] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233977, 'name': ReconfigVM_Task, 'duration_secs': 0.309233} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.159056] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Reconfigured VM instance instance-00000035 to attach disk [datastore2] 7f937d89-684b-44f5-9f30-783aeafe99d1/7f937d89-684b-44f5-9f30-783aeafe99d1.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 780.159700] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-287c6936-cbcf-46d7-9040-d987fab1d1e7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.167249] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 780.167249] env[69367]: value = "task-4233978" [ 780.167249] env[69367]: _type = "Task" [ 780.167249] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.176480] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233978, 'name': Rename_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.364889] env[69367]: DEBUG nova.compute.manager [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Start spawning the instance on the hypervisor. 
{{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 780.393422] env[69367]: DEBUG nova.virt.hardware [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 780.393682] env[69367]: DEBUG nova.virt.hardware [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 780.393836] env[69367]: DEBUG nova.virt.hardware [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 780.394033] env[69367]: DEBUG nova.virt.hardware [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 780.394191] env[69367]: DEBUG nova.virt.hardware [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 780.394340] env[69367]: DEBUG nova.virt.hardware [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 780.394543] env[69367]: DEBUG nova.virt.hardware [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 780.394701] env[69367]: DEBUG nova.virt.hardware [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 780.394868] env[69367]: DEBUG nova.virt.hardware [None req-7db65717-e9d4-4216-a205-1bcede4b437d 
tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 780.395040] env[69367]: DEBUG nova.virt.hardware [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 780.395223] env[69367]: DEBUG nova.virt.hardware [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 780.396126] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e214b96-f67f-4ee9-b59f-b9991d44bb31 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.399367] env[69367]: DEBUG nova.network.neutron [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 780.407492] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a38566e-6890-4ad1-9acc-1c18fea99327 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.489535] env[69367]: ERROR nova.scheduler.client.report [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [req-82bc14fc-98e8-4b96-86c1-02d1fb7a648b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-82bc14fc-98e8-4b96-86c1-02d1fb7a648b"}]} [ 780.489926] env[69367]: DEBUG oslo_concurrency.lockutils [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.153s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.490740] env[69367]: ERROR nova.compute.manager [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 780.490740] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] Traceback (most recent call last): [ 780.490740] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 780.490740] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] yield [ 780.490740] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 780.490740] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] self.set_inventory_for_provider( [ 780.490740] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 780.490740] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 780.490999] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-82bc14fc-98e8-4b96-86c1-02d1fb7a648b"}]} [ 780.490999] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] [ 780.490999] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] 
During handling of the above exception, another exception occurred: [ 780.490999] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] [ 780.490999] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] Traceback (most recent call last): [ 780.490999] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 780.490999] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] with self.rt.instance_claim(context, instance, node, allocs, [ 780.490999] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 780.490999] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] return f(*args, **kwargs) [ 780.491295] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 780.491295] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] self._update(elevated, cn) [ 780.491295] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 780.491295] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] self._update_to_placement(context, compute_node, startup) [ 780.491295] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 780.491295] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 780.491295] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 780.491295] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] return attempt.get(self._wrap_exception) [ 780.491295] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 780.491295] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] six.reraise(self.value[0], self.value[1], self.value[2]) [ 780.491295] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 780.491295] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] raise value [ 780.491295] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 780.491837] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 780.491837] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 780.491837] 
env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] self.reportclient.update_from_provider_tree( [ 780.491837] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 780.491837] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] with catch_all(pd.uuid): [ 780.491837] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 780.491837] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] self.gen.throw(typ, value, traceback) [ 780.491837] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 780.491837] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] raise exception.ResourceProviderSyncFailed() [ 780.491837] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 780.491837] env[69367]: ERROR nova.compute.manager [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] [ 780.492185] env[69367]: DEBUG nova.compute.utils [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 780.493694] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.198s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.495383] env[69367]: INFO nova.compute.claims [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 780.497776] env[69367]: DEBUG nova.compute.manager [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] Build of instance 63b3fceb-2a10-4626-a09d-5943535ad98c was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 780.498230] env[69367]: DEBUG nova.compute.manager [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 780.498462] env[69367]: DEBUG oslo_concurrency.lockutils [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquiring lock "refresh_cache-63b3fceb-2a10-4626-a09d-5943535ad98c" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.499554] env[69367]: DEBUG oslo_concurrency.lockutils [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquired lock "refresh_cache-63b3fceb-2a10-4626-a09d-5943535ad98c" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 780.499554] env[69367]: DEBUG nova.network.neutron [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 780.574435] env[69367]: DEBUG nova.network.neutron [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Updating instance_info_cache with network_info: [{"id": "1da2a1bd-4886-4d63-82a9-cd4fdd2c31d5", "address": "fa:16:3e:b7:f1:04", "network": {"id": "e89e8083-43a5-4e6e-aa58-98ecffb55d24", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-76221324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c8ac08a704e476fbe794f66f61e27a5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1da2a1bd-48", "ovs_interfaceid": "1da2a1bd-4886-4d63-82a9-cd4fdd2c31d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.678400] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233978, 'name': Rename_Task, 'duration_secs': 0.12928} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.678742] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 780.679017] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9a279829-2a96-44e6-b5d9-b453d4c72225 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.687067] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 780.687067] env[69367]: value = "task-4233979" [ 780.687067] env[69367]: _type = "Task" [ 780.687067] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.697473] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233979, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.033937] env[69367]: DEBUG nova.network.neutron [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 781.077707] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Releasing lock "refresh_cache-ab9d8e3e-65c5-4ac9-920f-3042b8cf2054" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.078053] env[69367]: DEBUG nova.compute.manager [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Instance network_info: |[{"id": "1da2a1bd-4886-4d63-82a9-cd4fdd2c31d5", "address": "fa:16:3e:b7:f1:04", "network": {"id": "e89e8083-43a5-4e6e-aa58-98ecffb55d24", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-76221324-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c8ac08a704e476fbe794f66f61e27a5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1da2a1bd-48", "ovs_interfaceid": "1da2a1bd-4886-4d63-82a9-cd4fdd2c31d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 781.078573] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:f1:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d4ef133-b6f3-41d1-add4-92a1482195cf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1da2a1bd-4886-4d63-82a9-cd4fdd2c31d5', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 781.094467] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 781.097498] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 781.098259] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-639441a1-21be-4949-bfe6-3cb6f6adbe76 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.119265] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 781.119265] env[69367]: value = "task-4233980" [ 781.119265] env[69367]: _type = "Task" [ 781.119265] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.130065] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233980, 'name': CreateVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.189155] env[69367]: DEBUG nova.network.neutron [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.200308] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233979, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.229756] env[69367]: DEBUG nova.compute.manager [req-31b183db-35b6-4c54-a604-5808dbe1d044 req-7f182912-ad1f-48fe-910d-cc0e279c9c0c service nova] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Received event network-vif-plugged-cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 781.230017] env[69367]: DEBUG oslo_concurrency.lockutils [req-31b183db-35b6-4c54-a604-5808dbe1d044 req-7f182912-ad1f-48fe-910d-cc0e279c9c0c service nova] Acquiring lock "557dc011-44a1-4240-9596-d055d57e176f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 781.230236] env[69367]: DEBUG oslo_concurrency.lockutils [req-31b183db-35b6-4c54-a604-5808dbe1d044 req-7f182912-ad1f-48fe-910d-cc0e279c9c0c service nova] Lock "557dc011-44a1-4240-9596-d055d57e176f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 781.230434] env[69367]: DEBUG oslo_concurrency.lockutils [req-31b183db-35b6-4c54-a604-5808dbe1d044 req-7f182912-ad1f-48fe-910d-cc0e279c9c0c service nova] Lock "557dc011-44a1-4240-9596-d055d57e176f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 781.230816] env[69367]: DEBUG nova.compute.manager [req-31b183db-35b6-4c54-a604-5808dbe1d044 
req-7f182912-ad1f-48fe-910d-cc0e279c9c0c service nova] [instance: 557dc011-44a1-4240-9596-d055d57e176f] No waiting events found dispatching network-vif-plugged-cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1 {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 781.230816] env[69367]: WARNING nova.compute.manager [req-31b183db-35b6-4c54-a604-5808dbe1d044 req-7f182912-ad1f-48fe-910d-cc0e279c9c0c service nova] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Received unexpected event network-vif-plugged-cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1 for instance with vm_state building and task_state spawning. [ 781.334629] env[69367]: DEBUG nova.network.neutron [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Successfully updated port: cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1 {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 781.520991] env[69367]: DEBUG nova.scheduler.client.report [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 781.538051] env[69367]: DEBUG nova.scheduler.client.report [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 781.538310] env[69367]: DEBUG nova.compute.provider_tree [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 781.551015] env[69367]: DEBUG nova.scheduler.client.report [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 781.569545] env[69367]: DEBUG nova.scheduler.client.report [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Refreshing trait 
associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 781.632095] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233980, 'name': CreateVM_Task, 'duration_secs': 0.310943} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.633032] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 781.633032] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.633336] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 781.633569] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 781.633829] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9fbd489-1dc1-4cde-a1ce-1d990fb5849e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.639845] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 781.639845] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52111351-a66d-68d4-3868-eb078b71e968" [ 781.639845] env[69367]: _type = "Task" [ 781.639845] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.652874] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52111351-a66d-68d4-3868-eb078b71e968, 'name': SearchDatastore_Task, 'duration_secs': 0.01074} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.653095] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.653369] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 781.653545] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.653695] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 781.653876] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 781.654164] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0f5aa73f-7087-4978-a376-3846e320a3ff {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.668542] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 781.668542] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 781.669855] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5cf17c6-a1f9-43a8-88b7-7fa1b992c8dc {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.678748] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 781.678748] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52282baf-2818-528e-c1cf-286fa63676b9" [ 781.678748] env[69367]: _type = "Task" [ 781.678748] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.691303] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52282baf-2818-528e-c1cf-286fa63676b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.696350] env[69367]: DEBUG oslo_concurrency.lockutils [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Releasing lock "refresh_cache-63b3fceb-2a10-4626-a09d-5943535ad98c" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.696350] env[69367]: DEBUG nova.compute.manager [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 781.696350] env[69367]: DEBUG nova.compute.manager [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 781.696350] env[69367]: DEBUG nova.network.neutron [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 781.703496] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233979, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.715487] env[69367]: DEBUG nova.network.neutron [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 781.839507] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "refresh_cache-557dc011-44a1-4240-9596-d055d57e176f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.839507] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquired lock "refresh_cache-557dc011-44a1-4240-9596-d055d57e176f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 781.839507] env[69367]: DEBUG nova.network.neutron [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 782.043886] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ce8ca3-0f5a-4805-916b-ffad72ea2070 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.051987] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5945de6-d92f-4e10-bddb-5ccd28362ec4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.084051] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0bcb8a1-55a6-482c-a2be-5d5e4dc7430d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.092348] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e15607-44b2-486e-8a77-8860ba1fed80 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.107339] env[69367]: DEBUG nova.compute.provider_tree [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 782.190293] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52282baf-2818-528e-c1cf-286fa63676b9, 'name': SearchDatastore_Task, 'duration_secs': 0.011358} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.191211] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5b924f1-6639-45df-bbb8-b6db8244ff7b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.200918] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 782.200918] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]523c12d1-c5c4-79b1-122b-8e2ab03c0399" [ 782.200918] env[69367]: _type = "Task" [ 782.200918] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.204415] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233979, 'name': PowerOnVM_Task, 'duration_secs': 1.054466} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.207719] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 782.207942] env[69367]: INFO nova.compute.manager [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Took 8.70 seconds to spawn the instance on the hypervisor. [ 782.208164] env[69367]: DEBUG nova.compute.manager [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 782.208951] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca6f7c01-6088-41ea-b4d2-01f063af1ea3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.221108] env[69367]: DEBUG nova.network.neutron [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.226056] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]523c12d1-c5c4-79b1-122b-8e2ab03c0399, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.386723] env[69367]: DEBUG nova.network.neutron [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 782.523998] env[69367]: DEBUG nova.network.neutron [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Updating instance_info_cache with network_info: [{"id": "cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1", "address": "fa:16:3e:ff:f7:e3", "network": {"id": "0bfdc337-bb57-4c33-9907-9098384ed460", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1159515822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd27807405a646e989b95325358a87eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb95db9d-92", "ovs_interfaceid": "cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.628203] env[69367]: ERROR nova.scheduler.client.report [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [req-d689f59a-8903-4edb-bff0-f3e693ecde41] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-d689f59a-8903-4edb-bff0-f3e693ecde41"}]} [ 782.628588] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.135s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 782.629195] env[69367]: ERROR nova.compute.manager [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 782.629195] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] Traceback (most recent call last): [ 782.629195] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 782.629195] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] yield [ 782.629195] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 782.629195] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] self.set_inventory_for_provider( [ 782.629195] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 782.629195] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 782.629474] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-d689f59a-8903-4edb-bff0-f3e693ecde41"}]} [ 782.629474] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] [ 782.629474] env[69367]: ERROR nova.compute.manager 
[instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] During handling of the above exception, another exception occurred: [ 782.629474] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] [ 782.629474] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] Traceback (most recent call last): [ 782.629474] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 782.629474] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] with self.rt.instance_claim(context, instance, node, allocs, [ 782.629474] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 782.629474] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] return f(*args, **kwargs) [ 782.629796] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 782.629796] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] self._update(elevated, cn) [ 782.629796] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 782.629796] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] self._update_to_placement(context, compute_node, startup) [ 782.629796] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 782.629796] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 782.629796] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 782.629796] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] return attempt.get(self._wrap_exception) [ 782.629796] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 782.629796] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] six.reraise(self.value[0], self.value[1], self.value[2]) [ 782.629796] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 782.629796] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] raise value [ 782.629796] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 782.630248] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 782.630248] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 
1390, in _update_to_placement [ 782.630248] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] self.reportclient.update_from_provider_tree( [ 782.630248] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 782.630248] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] with catch_all(pd.uuid): [ 782.630248] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 782.630248] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] self.gen.throw(typ, value, traceback) [ 782.630248] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 782.630248] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] raise exception.ResourceProviderSyncFailed() [ 782.630248] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 782.630248] env[69367]: ERROR nova.compute.manager [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] [ 782.630639] env[69367]: DEBUG nova.compute.utils [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 782.631094] env[69367]: DEBUG oslo_concurrency.lockutils [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 31.502s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 782.631279] env[69367]: DEBUG nova.objects.instance [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69367) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 782.633873] env[69367]: DEBUG nova.compute.manager [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] Build of instance 022ca95b-30cc-41f1-be48-51fdfe1f0b14 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 782.634453] env[69367]: DEBUG nova.compute.manager [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 782.634518] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Acquiring lock "refresh_cache-022ca95b-30cc-41f1-be48-51fdfe1f0b14" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.634652] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Acquired lock "refresh_cache-022ca95b-30cc-41f1-be48-51fdfe1f0b14" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 782.634810] env[69367]: DEBUG nova.network.neutron [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 782.715675] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]523c12d1-c5c4-79b1-122b-8e2ab03c0399, 'name': SearchDatastore_Task, 'duration_secs': 0.035233} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.715953] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 782.716235] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] ab9d8e3e-65c5-4ac9-920f-3042b8cf2054/ab9d8e3e-65c5-4ac9-920f-3042b8cf2054.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 782.716519] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6fc059ec-cf2b-45c1-ac5b-b171c2604e79 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.724151] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 782.724151] env[69367]: value = "task-4233981" [ 782.724151] env[69367]: _type = "Task" [ 782.724151] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.733041] env[69367]: INFO nova.compute.manager [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 63b3fceb-2a10-4626-a09d-5943535ad98c] Took 1.04 seconds to deallocate network for instance. [ 782.739613] env[69367]: INFO nova.compute.manager [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Took 41.20 seconds to build instance. [ 782.744150] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233981, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.026352] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Releasing lock "refresh_cache-557dc011-44a1-4240-9596-d055d57e176f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 783.026759] env[69367]: DEBUG nova.compute.manager [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Instance network_info: |[{"id": "cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1", "address": "fa:16:3e:ff:f7:e3", "network": {"id": "0bfdc337-bb57-4c33-9907-9098384ed460", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1159515822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd27807405a646e989b95325358a87eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb95db9d-92", "ovs_interfaceid": "cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 783.027264] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:f7:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9bb629cd-6d0f-4bed-965c-bd04a2f3ec49', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 783.035223] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Creating folder: Project (bd27807405a646e989b95325358a87eb). Parent ref: group-v837645. 
{{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 783.035925] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fc4ee9eb-30fa-4a9c-9dda-bd0e3eccb544 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.047589] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Created folder: Project (bd27807405a646e989b95325358a87eb) in parent group-v837645. [ 783.047978] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Creating folder: Instances. Parent ref: group-v837725. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 783.048182] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6ce4478b-5b3c-497f-b77d-4b780a8ff4cb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.062317] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Created folder: Instances in parent group-v837725. [ 783.062599] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 783.063185] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 783.063185] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f95bd45-21cf-44cf-a8f2-c56125d534ab {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.084165] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 783.084165] env[69367]: value = "task-4233984" [ 783.084165] env[69367]: _type = "Task" [ 783.084165] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.092296] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233984, 'name': CreateVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.161815] env[69367]: DEBUG nova.network.neutron [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 783.235260] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233981, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.245920] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "7f937d89-684b-44f5-9f30-783aeafe99d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.373s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.263819] env[69367]: DEBUG nova.compute.manager [req-57f5122a-2a53-46e0-aed2-5660215cf5be req-1b36e007-e417-41e0-9718-9138c57f867d service nova] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Received event network-changed-cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 783.264038] env[69367]: DEBUG nova.compute.manager [req-57f5122a-2a53-46e0-aed2-5660215cf5be req-1b36e007-e417-41e0-9718-9138c57f867d service nova] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Refreshing instance network info cache due to event network-changed-cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1. {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 783.264266] env[69367]: DEBUG oslo_concurrency.lockutils [req-57f5122a-2a53-46e0-aed2-5660215cf5be req-1b36e007-e417-41e0-9718-9138c57f867d service nova] Acquiring lock "refresh_cache-557dc011-44a1-4240-9596-d055d57e176f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.264411] env[69367]: DEBUG oslo_concurrency.lockutils [req-57f5122a-2a53-46e0-aed2-5660215cf5be req-1b36e007-e417-41e0-9718-9138c57f867d service nova] Acquired lock "refresh_cache-557dc011-44a1-4240-9596-d055d57e176f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.264571] env[69367]: DEBUG nova.network.neutron [req-57f5122a-2a53-46e0-aed2-5660215cf5be req-1b36e007-e417-41e0-9718-9138c57f867d service nova] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Refreshing network info cache for port cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 783.267222] env[69367]: DEBUG nova.network.neutron [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.594620] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4233984, 'name': CreateVM_Task, 'duration_secs': 0.407071} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.594782] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 783.595475] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.595646] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.595971] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 783.596267] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1dc3d22d-6d30-43cc-a587-5c0d96406e42 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.600807] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 783.600807] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]5294b1c8-3bb8-2675-36b6-51a1d374b275" [ 783.600807] env[69367]: _type = "Task" [ 783.600807] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.609021] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5294b1c8-3bb8-2675-36b6-51a1d374b275, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.644859] env[69367]: DEBUG oslo_concurrency.lockutils [None req-063bda9e-aa66-4ca5-8f2b-d62b6389ced1 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.646701] env[69367]: DEBUG oslo_concurrency.lockutils [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.933s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.646701] env[69367]: DEBUG nova.objects.instance [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Lazy-loading 'resources' on Instance uuid 011ab7de-98a7-41fc-9e05-e71965c73c09 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 783.736963] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233981, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.545659} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.737286] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] ab9d8e3e-65c5-4ac9-920f-3042b8cf2054/ab9d8e3e-65c5-4ac9-920f-3042b8cf2054.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 783.737510] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 783.737779] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f7cf1d4c-de56-459f-a426-cdad3c41d410 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.745971] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 783.745971] env[69367]: value = "task-4233985" [ 783.745971] env[69367]: _type = "Task" [ 783.745971] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.750271] env[69367]: DEBUG nova.compute.manager [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 783.758886] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233985, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.769810] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Releasing lock "refresh_cache-022ca95b-30cc-41f1-be48-51fdfe1f0b14" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 783.770120] env[69367]: DEBUG nova.compute.manager [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 783.770342] env[69367]: DEBUG nova.compute.manager [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 783.770516] env[69367]: DEBUG nova.network.neutron [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 783.775796] env[69367]: INFO nova.scheduler.client.report [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Deleted allocations for instance 63b3fceb-2a10-4626-a09d-5943535ad98c [ 783.818338] env[69367]: DEBUG nova.network.neutron [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 784.048133] env[69367]: DEBUG nova.network.neutron [req-57f5122a-2a53-46e0-aed2-5660215cf5be req-1b36e007-e417-41e0-9718-9138c57f867d service nova] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Updated VIF entry in instance network info cache for port cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 784.048542] env[69367]: DEBUG nova.network.neutron [req-57f5122a-2a53-46e0-aed2-5660215cf5be req-1b36e007-e417-41e0-9718-9138c57f867d service nova] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Updating instance_info_cache with network_info: [{"id": "cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1", "address": "fa:16:3e:ff:f7:e3", "network": {"id": "0bfdc337-bb57-4c33-9907-9098384ed460", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1159515822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd27807405a646e989b95325358a87eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb95db9d-92", "ovs_interfaceid": "cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.112508] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5294b1c8-3bb8-2675-36b6-51a1d374b275, 'name': SearchDatastore_Task, 'duration_secs': 0.010159} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.112682] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.113933] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 784.113933] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.113933] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 784.113933] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 784.113933] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-292a5b7e-b52b-4501-a19e-91d66f576ff9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.124823] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 784.124823] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 784.125101] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6bd7409-1427-4bb4-b485-f6ed65f5b5f0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.131303] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 784.131303] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]520a53c0-30df-9e3b-b7fb-997f0b0f27a3" [ 784.131303] env[69367]: _type = "Task" [ 784.131303] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.139579] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]520a53c0-30df-9e3b-b7fb-997f0b0f27a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.171385] env[69367]: DEBUG nova.scheduler.client.report [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 784.190426] env[69367]: DEBUG nova.scheduler.client.report [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 784.191407] env[69367]: DEBUG nova.compute.provider_tree [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 784.203718] env[69367]: DEBUG nova.scheduler.client.report [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) 
_refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 784.224440] env[69367]: DEBUG nova.scheduler.client.report [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 784.259884] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233985, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.131269} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.262325] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 784.263456] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f5a38db-2bbe-4bd4-afec-fdf4a001e343 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.289627] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] ab9d8e3e-65c5-4ac9-920f-3042b8cf2054/ab9d8e3e-65c5-4ac9-920f-3042b8cf2054.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 784.290800] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 784.294160] env[69367]: DEBUG oslo_concurrency.lockutils [None req-109a7ce6-4940-416c-adf7-78a1c5318ddc tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "63b3fceb-2a10-4626-a09d-5943535ad98c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.294s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 784.294160] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d031184a-c009-48cc-8b73-6b526c04e969 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.316674] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 
784.316674] env[69367]: value = "task-4233986" [ 784.316674] env[69367]: _type = "Task" [ 784.316674] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.324208] env[69367]: DEBUG nova.network.neutron [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.331114] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233986, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.551632] env[69367]: DEBUG oslo_concurrency.lockutils [req-57f5122a-2a53-46e0-aed2-5660215cf5be req-1b36e007-e417-41e0-9718-9138c57f867d service nova] Releasing lock "refresh_cache-557dc011-44a1-4240-9596-d055d57e176f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.645403] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]520a53c0-30df-9e3b-b7fb-997f0b0f27a3, 'name': SearchDatastore_Task, 'duration_secs': 0.01026} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.646547] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73052398-15b1-4af9-983f-31f7458d730f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.655029] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 784.655029] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52f47153-cefa-c2c2-53f2-06208b6476d0" [ 784.655029] env[69367]: _type = "Task" [ 784.655029] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.663676] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52f47153-cefa-c2c2-53f2-06208b6476d0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.700828] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8eadcad-80db-4702-b012-e98ebc1e6087 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.709344] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c525090c-b85c-47c9-90bc-8f290771dc98 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.742342] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea9fb95-e168-46ec-b116-dc6e1c4ddc39 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.751040] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3659d631-9432-4e59-99f1-ccb3b8ae1afa {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.767871] env[69367]: DEBUG nova.compute.provider_tree [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 784.812357] env[69367]: DEBUG nova.compute.manager [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 784.828347] env[69367]: INFO nova.compute.manager [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 022ca95b-30cc-41f1-be48-51fdfe1f0b14] Took 1.06 seconds to deallocate network for instance. [ 784.830925] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233986, 'name': ReconfigVM_Task, 'duration_secs': 0.299271} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.831390] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Reconfigured VM instance instance-00000036 to attach disk [datastore2] ab9d8e3e-65c5-4ac9-920f-3042b8cf2054/ab9d8e3e-65c5-4ac9-920f-3042b8cf2054.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 784.832129] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8f6e9a73-d181-472b-a102-0b68c61a9765 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.838952] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 784.838952] env[69367]: value = "task-4233987" [ 784.838952] env[69367]: _type = "Task" [ 784.838952] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.847887] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233987, 'name': Rename_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.165494] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52f47153-cefa-c2c2-53f2-06208b6476d0, 'name': SearchDatastore_Task, 'duration_secs': 0.035223} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.165774] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 785.166035] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 557dc011-44a1-4240-9596-d055d57e176f/557dc011-44a1-4240-9596-d055d57e176f.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 785.166299] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a896b1e4-6e47-404c-ad6f-ecc36e95bbf0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.173856] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 785.173856] env[69367]: value = "task-4233988" [ 785.173856] env[69367]: _type = "Task" [ 785.173856] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.182697] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4233988, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.280990] env[69367]: DEBUG oslo_concurrency.lockutils [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquiring lock "95efcff3-a81b-49fb-b85a-dae060c023b2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 785.280990] env[69367]: DEBUG oslo_concurrency.lockutils [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "95efcff3-a81b-49fb-b85a-dae060c023b2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.290798] env[69367]: ERROR nova.scheduler.client.report [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [req-b0e674ac-8af2-41f2-af08-c740f6ac3853] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-b0e674ac-8af2-41f2-af08-c740f6ac3853"}]} [ 785.291331] env[69367]: DEBUG oslo_concurrency.lockutils [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.645s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.292714] env[69367]: ERROR nova.compute.manager [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 785.292714] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Traceback (most recent call last): [ 785.292714] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 785.292714] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] yield [ 785.292714] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 785.292714] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] self.set_inventory_for_provider( [ 785.292714] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 785.292714] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 785.293129] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-b0e674ac-8af2-41f2-af08-c740f6ac3853"}]} [ 785.293129] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] [ 785.293129] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] During handling of the above exception, another exception occurred: [ 785.293129] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] [ 785.293129] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Traceback (most recent call last): [ 785.293129] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 785.293129] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] self._delete_instance(context, instance, bdms) [ 785.293129] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 785.293129] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] self._complete_deletion(context, instance) [ 785.293414] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 785.293414] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] self._update_resource_tracker(context, instance) [ 785.293414] env[69367]: ERROR nova.compute.manager [instance: 
011ab7de-98a7-41fc-9e05-e71965c73c09] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 785.293414] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] self.rt.update_usage(context, instance, instance.node) [ 785.293414] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 785.293414] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] return f(*args, **kwargs) [ 785.293414] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 785.293414] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] self._update(context.elevated(), self.compute_nodes[nodename]) [ 785.293414] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 785.293414] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] self._update_to_placement(context, compute_node, startup) [ 785.293414] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 785.293414] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 785.293781] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 785.293781] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] return attempt.get(self._wrap_exception) [ 785.293781] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 785.293781] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] six.reraise(self.value[0], self.value[1], self.value[2]) [ 785.293781] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 785.293781] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] raise value [ 785.293781] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 785.293781] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 785.293781] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 785.293781] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] self.reportclient.update_from_provider_tree( [ 785.293781] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 785.293781] 
env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] with catch_all(pd.uuid): [ 785.293781] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 785.294139] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] self.gen.throw(typ, value, traceback) [ 785.294139] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 785.294139] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] raise exception.ResourceProviderSyncFailed() [ 785.294139] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 785.294139] env[69367]: ERROR nova.compute.manager [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] [ 785.295404] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.561s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.297152] env[69367]: INFO nova.compute.claims [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: f3386485-a173-4f5d-8f29-4972df3ae468] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 785.337100] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 785.350080] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233987, 'name': Rename_Task, 'duration_secs': 0.166146} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.350380] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 785.350677] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-08cdd739-d08a-4ed2-8ad7-9ccc98ac51a8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.360738] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 785.360738] env[69367]: value = "task-4233989" [ 785.360738] env[69367]: _type = "Task" [ 785.360738] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.370934] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233989, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.685354] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4233988, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.801408] env[69367]: DEBUG oslo_concurrency.lockutils [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Lock "011ab7de-98a7-41fc-9e05-e71965c73c09" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.307s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.860548] env[69367]: INFO nova.scheduler.client.report [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Deleted allocations for instance 022ca95b-30cc-41f1-be48-51fdfe1f0b14 [ 785.876991] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233989, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.186240] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4233988, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.527587} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.186556] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 557dc011-44a1-4240-9596-d055d57e176f/557dc011-44a1-4240-9596-d055d57e176f.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 786.186731] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 786.186984] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-93d3b8c5-fdd6-456b-9bf1-72ca7e3630e3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.194772] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 786.194772] env[69367]: value = "task-4233990" [ 786.194772] env[69367]: _type = "Task" [ 786.194772] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.203938] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4233990, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.326762] env[69367]: DEBUG nova.scheduler.client.report [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 786.340549] env[69367]: DEBUG nova.scheduler.client.report [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 786.340783] env[69367]: DEBUG nova.compute.provider_tree [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 786.352996] env[69367]: DEBUG nova.scheduler.client.report [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 786.372297] env[69367]: DEBUG nova.scheduler.client.report [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 786.374571] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9f96bab7-a5b1-41bd-b4f3-530a3a59f53f tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Lock "022ca95b-30cc-41f1-be48-51fdfe1f0b14" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.411s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 786.380557] env[69367]: DEBUG oslo_vmware.api [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e 
tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233989, 'name': PowerOnVM_Task, 'duration_secs': 0.709052} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.381107] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 786.381325] env[69367]: INFO nova.compute.manager [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Took 8.64 seconds to spawn the instance on the hypervisor. [ 786.381505] env[69367]: DEBUG nova.compute.manager [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 786.382323] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7bb0a8-a913-4e6f-8bb3-cdd81b5f6bd3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.704230] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4233990, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072445} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.706696] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 786.707652] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed94c8fb-54d3-4cad-82ca-e2cb3b8b7711 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.731934] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] 557dc011-44a1-4240-9596-d055d57e176f/557dc011-44a1-4240-9596-d055d57e176f.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 786.734817] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-169ecb6b-84e9-454f-89d1-759d0db1a59f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.756057] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 786.756057] env[69367]: value = "task-4233991" [ 786.756057] env[69367]: _type = "Task" [ 786.756057] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.768269] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4233991, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.847620] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a2cc1b-3d3c-4c71-b4df-64e5d0289c34 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.856265] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990ca1e6-6ef4-4fd2-a20e-746a0180b5ae {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.886155] env[69367]: DEBUG nova.compute.manager [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 786.889248] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62aad6dd-615e-441b-a298-124b4bc73558 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.901498] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d293ff5-6144-43d4-9fdf-90246110e2f8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.905786] env[69367]: INFO nova.compute.manager [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Took 43.19 seconds to build instance. [ 786.920505] env[69367]: DEBUG nova.compute.provider_tree [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 787.266658] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4233991, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.315939] env[69367]: DEBUG oslo_concurrency.lockutils [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 787.408472] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fd8398d1-16ed-4275-86a1-0099d2a35c5e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "ab9d8e3e-65c5-4ac9-920f-3042b8cf2054" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.480s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 787.415865] env[69367]: DEBUG oslo_concurrency.lockutils [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 787.451764] env[69367]: ERROR nova.scheduler.client.report [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [req-7cc0b33e-7258-4623-b517-6e8a02334cd9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-7cc0b33e-7258-4623-b517-6e8a02334cd9"}]} [ 787.452264] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.157s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 787.453407] env[69367]: ERROR nova.compute.manager [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: f3386485-a173-4f5d-8f29-4972df3ae468] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 787.453407] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] Traceback (most recent call last): [ 787.453407] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 787.453407] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] yield [ 787.453407] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 787.453407] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] self.set_inventory_for_provider( [ 787.453407] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 787.453407] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 787.453746] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-7cc0b33e-7258-4623-b517-6e8a02334cd9"}]} [ 787.453746] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] [ 787.453746] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] During handling of the above exception, another exception occurred: [ 787.453746] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] [ 787.453746] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] Traceback (most recent call last): [ 787.453746] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 787.453746] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] with self.rt.instance_claim(context, instance, node, allocs, [ 787.453746] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 787.453746] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] return f(*args, **kwargs) [ 787.454125] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 787.454125] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] self._update(elevated, cn) [ 787.454125] env[69367]: ERROR nova.compute.manager [instance: 
f3386485-a173-4f5d-8f29-4972df3ae468] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 787.454125] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] self._update_to_placement(context, compute_node, startup) [ 787.454125] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 787.454125] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 787.454125] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 787.454125] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] return attempt.get(self._wrap_exception) [ 787.454125] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 787.454125] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] six.reraise(self.value[0], self.value[1], self.value[2]) [ 787.454125] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 787.454125] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] raise value [ 787.454125] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 787.454550] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 787.454550] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 787.454550] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] self.reportclient.update_from_provider_tree( [ 787.454550] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 787.454550] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] with catch_all(pd.uuid): [ 787.454550] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 787.454550] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] self.gen.throw(typ, value, traceback) [ 787.454550] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 787.454550] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] raise exception.ResourceProviderSyncFailed() [ 787.454550] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 787.454550] env[69367]: ERROR nova.compute.manager [instance: f3386485-a173-4f5d-8f29-4972df3ae468] [ 787.454900] env[69367]: DEBUG nova.compute.utils [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: f3386485-a173-4f5d-8f29-4972df3ae468] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 787.455225] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.541s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 787.456702] env[69367]: INFO nova.compute.claims [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 787.459597] env[69367]: DEBUG nova.compute.manager [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: f3386485-a173-4f5d-8f29-4972df3ae468] Build of instance f3386485-a173-4f5d-8f29-4972df3ae468 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 787.459858] env[69367]: DEBUG nova.compute.manager [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: f3386485-a173-4f5d-8f29-4972df3ae468] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 787.460016] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Acquiring lock "refresh_cache-f3386485-a173-4f5d-8f29-4972df3ae468" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.460196] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Acquired lock "refresh_cache-f3386485-a173-4f5d-8f29-4972df3ae468" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 787.460360] env[69367]: DEBUG nova.network.neutron [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: f3386485-a173-4f5d-8f29-4972df3ae468] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 787.767873] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: 
{'id': task-4233991, 'name': ReconfigVM_Task, 'duration_secs': 0.827419} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.769358] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Reconfigured VM instance instance-00000037 to attach disk [datastore2] 557dc011-44a1-4240-9596-d055d57e176f/557dc011-44a1-4240-9596-d055d57e176f.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 787.772154] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c0f90e26-ca37-450b-bb57-da5ddfc20877 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.780646] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 787.780646] env[69367]: value = "task-4233992" [ 787.780646] env[69367]: _type = "Task" [ 787.780646] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.792154] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4233992, 'name': Rename_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.911717] env[69367]: DEBUG nova.compute.manager [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 787.989909] env[69367]: DEBUG nova.network.neutron [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: f3386485-a173-4f5d-8f29-4972df3ae468] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 788.080797] env[69367]: DEBUG nova.network.neutron [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: f3386485-a173-4f5d-8f29-4972df3ae468] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.199486] env[69367]: DEBUG oslo_concurrency.lockutils [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "7f937d89-684b-44f5-9f30-783aeafe99d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 788.199752] env[69367]: DEBUG oslo_concurrency.lockutils [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "7f937d89-684b-44f5-9f30-783aeafe99d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 788.200188] env[69367]: DEBUG oslo_concurrency.lockutils [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "7f937d89-684b-44f5-9f30-783aeafe99d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 788.200453] env[69367]: DEBUG oslo_concurrency.lockutils [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "7f937d89-684b-44f5-9f30-783aeafe99d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 788.200640] env[69367]: DEBUG oslo_concurrency.lockutils [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "7f937d89-684b-44f5-9f30-783aeafe99d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 788.203528] env[69367]: INFO nova.compute.manager [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Terminating instance [ 788.275099] env[69367]: DEBUG oslo_concurrency.lockutils [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "ab9d8e3e-65c5-4ac9-920f-3042b8cf2054" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 788.275483] env[69367]: DEBUG 
oslo_concurrency.lockutils [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "ab9d8e3e-65c5-4ac9-920f-3042b8cf2054" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 788.276066] env[69367]: DEBUG oslo_concurrency.lockutils [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "ab9d8e3e-65c5-4ac9-920f-3042b8cf2054-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 788.276066] env[69367]: DEBUG oslo_concurrency.lockutils [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "ab9d8e3e-65c5-4ac9-920f-3042b8cf2054-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 788.276066] env[69367]: DEBUG oslo_concurrency.lockutils [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "ab9d8e3e-65c5-4ac9-920f-3042b8cf2054-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 788.278067] env[69367]: INFO nova.compute.manager [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Terminating instance [ 788.294549] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4233992, 'name': Rename_Task, 'duration_secs': 0.238835} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.295518] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 788.295812] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2b738178-ab4f-4ef5-b1a2-7413ea854556 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.304631] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 788.304631] env[69367]: value = "task-4233993" [ 788.304631] env[69367]: _type = "Task" [ 788.304631] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.316528] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4233993, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.436701] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 788.490565] env[69367]: DEBUG nova.scheduler.client.report [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 788.508436] env[69367]: DEBUG nova.scheduler.client.report [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 788.508683] env[69367]: DEBUG nova.compute.provider_tree [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 788.522579] env[69367]: DEBUG nova.scheduler.client.report [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 788.550895] env[69367]: DEBUG nova.scheduler.client.report [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, 
traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 788.583621] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Releasing lock "refresh_cache-f3386485-a173-4f5d-8f29-4972df3ae468" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 788.583906] env[69367]: DEBUG nova.compute.manager [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 788.584128] env[69367]: DEBUG nova.compute.manager [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: f3386485-a173-4f5d-8f29-4972df3ae468] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 788.584324] env[69367]: DEBUG nova.network.neutron [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: f3386485-a173-4f5d-8f29-4972df3ae468] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 788.606683] env[69367]: DEBUG nova.network.neutron [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: f3386485-a173-4f5d-8f29-4972df3ae468] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 788.711655] env[69367]: DEBUG nova.compute.manager [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Start destroying the instance on the hypervisor. 
{{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 788.711972] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 788.713639] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e862b0-bb00-48cc-ab31-f494fe01e050 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.724619] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 788.728732] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f6d9c3d5-a056-4c19-ab1c-1c0018abe91c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.738663] env[69367]: DEBUG oslo_vmware.api [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 788.738663] env[69367]: value = "task-4233994" [ 788.738663] env[69367]: _type = "Task" [ 788.738663] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.755860] env[69367]: DEBUG oslo_vmware.api [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233994, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.782471] env[69367]: DEBUG nova.compute.manager [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Start destroying the instance on the hypervisor. 
{{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 788.782861] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 788.784269] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fdf7ba4-5092-4b3d-8181-1677b4d65b86 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.801978] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 788.802307] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-04b65abd-fcc7-4117-92ee-930f4786a973 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.816848] env[69367]: DEBUG oslo_vmware.api [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 788.816848] env[69367]: value = "task-4233995" [ 788.816848] env[69367]: _type = "Task" [ 788.816848] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.827133] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4233993, 'name': PowerOnVM_Task} progress is 96%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.836517] env[69367]: DEBUG oslo_vmware.api [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233995, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.105446] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d1376ef-ce3f-4be2-b53a-021b4599665b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.109933] env[69367]: DEBUG nova.network.neutron [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: f3386485-a173-4f5d-8f29-4972df3ae468] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.114514] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e380c19-adc5-4157-86e6-482c75d29639 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.145279] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a8ae518-6ee0-4774-bd68-13e7b8b15ac3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.155543] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad91fade-d563-4bb5-a520-196ca3055e89 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.171768] env[69367]: DEBUG nova.compute.provider_tree [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 789.250709] env[69367]: DEBUG oslo_vmware.api [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233994, 'name': PowerOffVM_Task, 'duration_secs': 0.265028} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.250882] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 789.251042] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 789.251301] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-39870cc1-7448-4f19-a1d2-0a78d675b213 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.318653] env[69367]: DEBUG oslo_vmware.api [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4233993, 'name': PowerOnVM_Task, 'duration_secs': 0.590481} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.324594] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 789.324868] env[69367]: INFO nova.compute.manager [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Took 8.96 seconds to spawn the instance on the hypervisor. 
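
The spawn sequence above (ReconfigVM_Task, then Rename_Task, then PowerOnVM_Task) follows the usual oslo.vmware pattern: the driver starts an asynchronous vSphere *_Task through the API session and then blocks in wait_for_task, which is what produces the "Waiting for the task" and "progress is N%" records seen here. A minimal sketch of that pattern, assuming a reachable vCenter and a VM managed-object reference resolved elsewhere (the endpoint, credentials and vm_ref below are placeholders, not values from this run):

    # Sketch of the invoke/poll pattern behind the task records above.
    from oslo_vmware import api

    # Placeholder endpoint and credentials; a real deployment takes these
    # from the [vmware] section of nova.conf.
    session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    vm_ref = ...  # managed object reference of the instance's VM, resolved elsewhere

    # Start an asynchronous vSphere task, e.g. powering the VM on ...
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # ... then poll it to completion; this polling is what logs
    # "Task: {'id': task-..., 'name': PowerOnVM_Task} progress is N%".
    session.wait_for_task(task)
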
[ 789.325127] env[69367]: DEBUG nova.compute.manager [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 789.325503] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 789.325729] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 789.325958] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Deleting the datastore file [datastore2] 7f937d89-684b-44f5-9f30-783aeafe99d1 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 789.327017] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2807729-09e6-4d18-ba17-b9b917e088cf {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.330327] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d8726a23-7101-4057-a0f1-225f8f5e8f08 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.338948] env[69367]: DEBUG oslo_vmware.api [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233995, 'name': PowerOffVM_Task, 'duration_secs': 0.269263} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.341419] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 789.341519] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 789.344701] env[69367]: DEBUG oslo_vmware.api [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 789.344701] env[69367]: value = "task-4233997" [ 789.344701] env[69367]: _type = "Task" [ 789.344701] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.345120] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a81c0482-365d-4b29-b7c3-c50e3f678880 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.356028] env[69367]: DEBUG oslo_vmware.api [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233997, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.606045] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 789.606045] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 789.606045] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Deleting the datastore file [datastore2] ab9d8e3e-65c5-4ac9-920f-3042b8cf2054 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 789.606045] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2317b1ac-4439-4110-83c8-14eef1f2d0c7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.613633] env[69367]: DEBUG oslo_vmware.api [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for the task: (returnval){ [ 789.613633] env[69367]: value = "task-4233999" [ 789.613633] env[69367]: _type = "Task" [ 789.613633] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.614258] env[69367]: INFO nova.compute.manager [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: f3386485-a173-4f5d-8f29-4972df3ae468] Took 1.03 seconds to deallocate network for instance. [ 789.626579] env[69367]: DEBUG oslo_vmware.api [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233999, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.694729] env[69367]: ERROR nova.scheduler.client.report [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] [req-f11a1284-866d-431f-b40a-54a10b54ed54] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-f11a1284-866d-431f-b40a-54a10b54ed54"}]} [ 789.695114] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.240s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 789.695708] env[69367]: ERROR nova.compute.manager [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
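
The 400 above is placement's inventory schema rejecting the DISK_GB record: the compute host reported max_unit = 0 for DISK_GB (the earlier refresh of the same provider still showed max_unit = 1), while the schema quoted in the response requires max_unit >= 1, so the PUT to /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories fails and the claim is unwound as ResourceProviderSyncFailed. A small standalone check against the same constraint reproduces the message; the schema fragment is copied from the error body, and the use of the jsonschema library here is purely illustrative, not Nova's or placement's actual code path:

    # Re-check of the constraint quoted in the 400 response above.
    import jsonschema

    # max_unit rule exactly as quoted by placement in the error detail.
    MAX_UNIT_SCHEMA = {'type': 'integer', 'maximum': 2147483647, 'minimum': 1}

    # The DISK_GB inventory Nova tried to PUT; values taken from the log.
    disk_gb = {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0,
               'step_size': 1, 'allocation_ratio': 1.0}

    try:
        jsonschema.validate(disk_gb['max_unit'], MAX_UNIT_SCHEMA)
    except jsonschema.ValidationError as exc:
        # Prints "0 is less than the minimum of 1", matching the placement error.
        print(exc.message)
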
[ 789.695708] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] Traceback (most recent call last): [ 789.695708] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 789.695708] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] yield [ 789.695708] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 789.695708] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] self.set_inventory_for_provider( [ 789.695708] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 789.695708] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 789.695967] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-f11a1284-866d-431f-b40a-54a10b54ed54"}]} [ 789.695967] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] [ 789.695967] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] During handling of the above exception, another exception occurred: [ 789.695967] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] [ 789.695967] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] Traceback (most recent call last): [ 789.695967] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 789.695967] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] with self.rt.instance_claim(context, instance, node, allocs, [ 789.695967] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 789.695967] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] return f(*args, **kwargs) [ 789.696290] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 789.696290] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] self._update(elevated, cn) [ 789.696290] env[69367]: ERROR nova.compute.manager [instance: 
236173c7-9464-44b5-83a5-6ff60eedcc6a] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 789.696290] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] self._update_to_placement(context, compute_node, startup) [ 789.696290] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 789.696290] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 789.696290] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 789.696290] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] return attempt.get(self._wrap_exception) [ 789.696290] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 789.696290] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] six.reraise(self.value[0], self.value[1], self.value[2]) [ 789.696290] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 789.696290] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] raise value [ 789.696290] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 789.696696] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 789.696696] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 789.696696] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] self.reportclient.update_from_provider_tree( [ 789.696696] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 789.696696] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] with catch_all(pd.uuid): [ 789.696696] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 789.696696] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] self.gen.throw(typ, value, traceback) [ 789.696696] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 789.696696] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] raise exception.ResourceProviderSyncFailed() [ 789.696696] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 789.696696] env[69367]: ERROR nova.compute.manager [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] [ 789.697059] env[69367]: DEBUG nova.compute.utils [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 789.697768] env[69367]: DEBUG oslo_concurrency.lockutils [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.585s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 789.699116] env[69367]: INFO nova.compute.claims [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 789.702520] env[69367]: DEBUG nova.compute.manager [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] Build of instance 236173c7-9464-44b5-83a5-6ff60eedcc6a was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 789.702975] env[69367]: DEBUG nova.compute.manager [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 789.703403] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] Acquiring lock "refresh_cache-236173c7-9464-44b5-83a5-6ff60eedcc6a" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.703570] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] Acquired lock "refresh_cache-236173c7-9464-44b5-83a5-6ff60eedcc6a" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 789.703738] env[69367]: DEBUG nova.network.neutron [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 789.857822] env[69367]: INFO nova.compute.manager [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 
tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Took 43.34 seconds to build instance. [ 789.863618] env[69367]: DEBUG oslo_vmware.api [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233997, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.368229} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.863952] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 789.864258] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 789.864511] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 789.864766] env[69367]: INFO nova.compute.manager [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Took 1.15 seconds to destroy the instance on the hypervisor. [ 789.865122] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 789.865322] env[69367]: DEBUG nova.compute.manager [-] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 789.865497] env[69367]: DEBUG nova.network.neutron [-] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 790.132066] env[69367]: DEBUG oslo_vmware.api [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Task: {'id': task-4233999, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.325117} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.132355] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 790.132552] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 790.132732] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 790.132907] env[69367]: INFO nova.compute.manager [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Took 1.35 seconds to destroy the instance on the hypervisor. [ 790.133774] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 790.133774] env[69367]: DEBUG nova.compute.manager [-] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 790.133774] env[69367]: DEBUG nova.network.neutron [-] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 790.179363] env[69367]: DEBUG nova.compute.manager [req-d0a30801-4e59-48ca-878a-e6709dbea624 req-dd8f456a-e7cf-4b3f-b6ee-09edf7aa0665 service nova] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Received event network-vif-deleted-12a5e1ba-1f50-4421-a715-d2e4cbb74000 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 790.179580] env[69367]: INFO nova.compute.manager [req-d0a30801-4e59-48ca-878a-e6709dbea624 req-dd8f456a-e7cf-4b3f-b6ee-09edf7aa0665 service nova] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Neutron deleted interface 12a5e1ba-1f50-4421-a715-d2e4cbb74000; detaching it from the instance and deleting it from the info cache [ 790.179777] env[69367]: DEBUG nova.network.neutron [req-d0a30801-4e59-48ca-878a-e6709dbea624 req-dd8f456a-e7cf-4b3f-b6ee-09edf7aa0665 service nova] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.225153] env[69367]: DEBUG nova.network.neutron [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 790.300590] env[69367]: DEBUG nova.network.neutron [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.360356] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7db65717-e9d4-4216-a205-1bcede4b437d tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "557dc011-44a1-4240-9596-d055d57e176f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.971s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.470243] env[69367]: DEBUG nova.compute.manager [req-b7729834-df1f-4fd8-aca4-594ace46713e req-b8c8ac54-a4b1-4e37-84f7-47563a7218e4 service nova] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Received event network-changed-cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 790.470492] env[69367]: DEBUG nova.compute.manager [req-b7729834-df1f-4fd8-aca4-594ace46713e req-b8c8ac54-a4b1-4e37-84f7-47563a7218e4 service nova] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Refreshing instance network info cache due to event network-changed-cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 790.470808] env[69367]: DEBUG oslo_concurrency.lockutils [req-b7729834-df1f-4fd8-aca4-594ace46713e req-b8c8ac54-a4b1-4e37-84f7-47563a7218e4 service nova] Acquiring lock "refresh_cache-557dc011-44a1-4240-9596-d055d57e176f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.470877] env[69367]: DEBUG oslo_concurrency.lockutils [req-b7729834-df1f-4fd8-aca4-594ace46713e req-b8c8ac54-a4b1-4e37-84f7-47563a7218e4 service nova] Acquired lock "refresh_cache-557dc011-44a1-4240-9596-d055d57e176f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 790.471105] env[69367]: DEBUG nova.network.neutron [req-b7729834-df1f-4fd8-aca4-594ace46713e req-b8c8ac54-a4b1-4e37-84f7-47563a7218e4 service nova] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Refreshing network info cache for port cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 790.663360] env[69367]: INFO nova.scheduler.client.report [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Deleted allocations for instance f3386485-a173-4f5d-8f29-4972df3ae468 [ 790.669766] env[69367]: DEBUG nova.network.neutron [-] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.687670] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dee254b6-dc1c-4bcc-aa58-d320e9f2430f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.709359] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e8ba301-8f74-497b-8e67-00f752e39519 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.746078] env[69367]: DEBUG nova.compute.manager [req-d0a30801-4e59-48ca-878a-e6709dbea624 req-dd8f456a-e7cf-4b3f-b6ee-09edf7aa0665 service nova] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Detach interface failed, port_id=12a5e1ba-1f50-4421-a715-d2e4cbb74000, reason: Instance 7f937d89-684b-44f5-9f30-783aeafe99d1 could not be found. 
{{(pid=69367) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11601}} [ 790.749428] env[69367]: DEBUG nova.scheduler.client.report [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 790.786008] env[69367]: DEBUG nova.scheduler.client.report [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 790.786598] env[69367]: DEBUG nova.compute.provider_tree [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 790.803815] env[69367]: DEBUG nova.scheduler.client.report [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 790.806188] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] Releasing lock "refresh_cache-236173c7-9464-44b5-83a5-6ff60eedcc6a" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 790.806423] env[69367]: DEBUG nova.compute.manager [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 790.806591] env[69367]: DEBUG nova.compute.manager [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 790.806767] env[69367]: DEBUG nova.network.neutron [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 790.822767] env[69367]: DEBUG nova.network.neutron [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 790.833470] env[69367]: DEBUG nova.scheduler.client.report [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 790.863782] env[69367]: DEBUG nova.compute.manager [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 790.954400] env[69367]: DEBUG nova.network.neutron [-] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.177962] env[69367]: INFO nova.compute.manager [-] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Took 1.31 seconds to deallocate network for instance. [ 791.178519] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c1bd6450-02b3-47f3-be26-375686994005 tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Lock "f3386485-a173-4f5d-8f29-4972df3ae468" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.884s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 791.272250] env[69367]: DEBUG nova.network.neutron [req-b7729834-df1f-4fd8-aca4-594ace46713e req-b8c8ac54-a4b1-4e37-84f7-47563a7218e4 service nova] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Updated VIF entry in instance network info cache for port cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 791.272612] env[69367]: DEBUG nova.network.neutron [req-b7729834-df1f-4fd8-aca4-594ace46713e req-b8c8ac54-a4b1-4e37-84f7-47563a7218e4 service nova] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Updating instance_info_cache with network_info: [{"id": "cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1", "address": "fa:16:3e:ff:f7:e3", "network": {"id": "0bfdc337-bb57-4c33-9907-9098384ed460", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1159515822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd27807405a646e989b95325358a87eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb95db9d-92", "ovs_interfaceid": "cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.275362] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5181e29c-4b25-421b-b961-958f459f9617 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.283092] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d61af9-4cf9-4909-b590-195e6efc6ab2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.315393] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c562b80-5bb0-47b5-980e-b43703fef5cc {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.323320] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b05735f-407b-4b34-b6c5-65064d3f7f3f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.327538] env[69367]: DEBUG nova.network.neutron [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.339089] env[69367]: DEBUG nova.compute.provider_tree [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 
'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 791.382779] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 791.457183] env[69367]: INFO nova.compute.manager [-] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Took 1.32 seconds to deallocate network for instance. [ 791.687030] env[69367]: DEBUG nova.compute.manager [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: eab70948-bb67-4f56-9f35-65e164fd5990] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 791.687928] env[69367]: DEBUG oslo_concurrency.lockutils [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 791.779176] env[69367]: DEBUG oslo_concurrency.lockutils [req-b7729834-df1f-4fd8-aca4-594ace46713e req-b8c8ac54-a4b1-4e37-84f7-47563a7218e4 service nova] Releasing lock "refresh_cache-557dc011-44a1-4240-9596-d055d57e176f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 791.831654] env[69367]: INFO nova.compute.manager [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] [instance: 236173c7-9464-44b5-83a5-6ff60eedcc6a] Took 1.02 seconds to deallocate network for instance. [ 791.864941] env[69367]: ERROR nova.scheduler.client.report [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [req-41cd6d7f-b9b1-405d-9a4a-390c2ed81cf1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-41cd6d7f-b9b1-405d-9a4a-390c2ed81cf1"}]} [ 791.865434] env[69367]: DEBUG oslo_concurrency.lockutils [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.168s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 791.866045] env[69367]: ERROR nova.compute.manager [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 791.866045] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] Traceback (most recent call last): [ 791.866045] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 791.866045] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] yield [ 791.866045] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 791.866045] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] self.set_inventory_for_provider( [ 791.866045] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 791.866045] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 791.866325] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-41cd6d7f-b9b1-405d-9a4a-390c2ed81cf1"}]} [ 791.866325] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] [ 791.866325] env[69367]: ERROR nova.compute.manager 
[instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] During handling of the above exception, another exception occurred: [ 791.866325] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] [ 791.866325] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] Traceback (most recent call last): [ 791.866325] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 791.866325] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] with self.rt.instance_claim(context, instance, node, allocs, [ 791.866325] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 791.866325] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] return f(*args, **kwargs) [ 791.866640] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 791.866640] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] self._update(elevated, cn) [ 791.866640] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 791.866640] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] self._update_to_placement(context, compute_node, startup) [ 791.866640] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 791.866640] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 791.866640] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 791.866640] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] return attempt.get(self._wrap_exception) [ 791.866640] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 791.866640] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] six.reraise(self.value[0], self.value[1], self.value[2]) [ 791.866640] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 791.866640] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] raise value [ 791.866640] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 791.867077] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 791.867077] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 
1390, in _update_to_placement [ 791.867077] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] self.reportclient.update_from_provider_tree( [ 791.867077] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 791.867077] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] with catch_all(pd.uuid): [ 791.867077] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 791.867077] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] self.gen.throw(typ, value, traceback) [ 791.867077] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 791.867077] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] raise exception.ResourceProviderSyncFailed() [ 791.867077] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 791.867077] env[69367]: ERROR nova.compute.manager [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] [ 791.867433] env[69367]: DEBUG nova.compute.utils [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 791.870783] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.712s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 791.871078] env[69367]: DEBUG nova.objects.instance [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Lazy-loading 'resources' on Instance uuid e1c7d100-4ad7-4871-970f-bb7562bfc6fc {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 791.872936] env[69367]: DEBUG nova.compute.manager [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] Build of instance 8ee84a56-cc49-4056-b561-aa1f2b10a06c was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 791.874389] env[69367]: DEBUG nova.compute.manager [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 791.874659] env[69367]: DEBUG oslo_concurrency.lockutils [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Acquiring lock "refresh_cache-8ee84a56-cc49-4056-b561-aa1f2b10a06c" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.876239] env[69367]: DEBUG oslo_concurrency.lockutils [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Acquired lock "refresh_cache-8ee84a56-cc49-4056-b561-aa1f2b10a06c" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.876453] env[69367]: DEBUG nova.network.neutron [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 791.965770] env[69367]: DEBUG oslo_concurrency.lockutils [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 792.211518] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 792.214247] env[69367]: DEBUG nova.compute.manager [req-ae0c2653-b1d3-4b7a-a525-3a9cd4f97ae9 req-713b0fd7-1309-4cdc-805b-638d94dc8cf4 service nova] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Received event network-vif-deleted-1da2a1bd-4886-4d63-82a9-cd4fdd2c31d5 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 792.393095] env[69367]: DEBUG nova.scheduler.client.report [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 792.397078] env[69367]: DEBUG nova.network.neutron [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 792.407222] env[69367]: DEBUG nova.scheduler.client.report [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 792.407448] env[69367]: DEBUG nova.compute.provider_tree [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 792.419040] env[69367]: DEBUG nova.scheduler.client.report [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 792.441630] env[69367]: DEBUG nova.scheduler.client.report [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 792.476286] env[69367]: DEBUG nova.network.neutron [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.808635] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de066d7-99df-490a-99e0-aed898cbd9f1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.816801] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc3810d5-ac8a-4bf4-b719-edede9f387ec {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.852540] env[69367]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d2ec9a-e759-4e5b-9345-c6e844226e34 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.860743] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d4e912-f68b-4449-a815-f9a00e08d957 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.867323] env[69367]: INFO nova.scheduler.client.report [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] Deleted allocations for instance 236173c7-9464-44b5-83a5-6ff60eedcc6a [ 792.880481] env[69367]: DEBUG nova.compute.provider_tree [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 792.981174] env[69367]: DEBUG oslo_concurrency.lockutils [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Releasing lock "refresh_cache-8ee84a56-cc49-4056-b561-aa1f2b10a06c" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.981174] env[69367]: DEBUG nova.compute.manager [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 792.981174] env[69367]: DEBUG nova.compute.manager [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 792.981174] env[69367]: DEBUG nova.network.neutron [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 792.998030] env[69367]: DEBUG nova.network.neutron [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 793.386515] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2f962397-ad7d-47ec-8c2c-a1310a8036a7 tempest-ServersAdminNegativeTestJSON-2086084525 tempest-ServersAdminNegativeTestJSON-2086084525-project-member] Lock "236173c7-9464-44b5-83a5-6ff60eedcc6a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.799s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.405393] env[69367]: ERROR nova.scheduler.client.report [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [req-2cc171e0-67c5-4d1d-bf2a-7c157c7b7f80] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2cc171e0-67c5-4d1d-bf2a-7c157c7b7f80"}]} [ 793.405393] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.534s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.405771] env[69367]: ERROR nova.compute.manager [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
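Note on the repeated 400 responses above: every failed PUT to /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories carries DISK_GB with max_unit = 0, while the schema fragment quoted in the error body requires max_unit to be an integer between 1 and 2147483647. Placement therefore rejects the inventory update, and the resource tracker surfaces this as nova.exception.ResourceProviderSyncFailed, which is what fails the instance claims and re-schedules the builds seen in this log. The standalone Python sketch below is illustrative only: it is not Nova or Placement code, and the schema is a cut-down approximation of the fragment quoted in the error; it simply reproduces the same jsonschema validation message with the payload values from the log.

# Illustrative sketch (assumption: not the real Placement schema, only the
# fragment quoted in the 400 body). Requires the 'jsonschema' package.
import jsonschema

INVENTORY_SCHEMA = {
    "type": "object",
    "properties": {
        "inventories": {
            "type": "object",
            "patternProperties": {
                "^[A-Z0-9_]+$": {
                    "type": "object",
                    "properties": {
                        "total": {"type": "integer"},
                        # Fragment quoted verbatim in the error detail above.
                        "max_unit": {
                            "type": "integer",
                            "maximum": 2147483647,
                            "minimum": 1,
                        },
                    },
                }
            },
        }
    },
}

# The DISK_GB entry the resource tracker tried to PUT, per the log.
payload = {"inventories": {"DISK_GB": {"total": 400, "max_unit": 0}}}

try:
    jsonschema.validate(payload, INVENTORY_SCHEMA)
except jsonschema.ValidationError as exc:
    # Prints: 0 is less than the minimum of 1
    # i.e. the same detail string Placement returns in the 400 response.
    print(exc.message)
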
[ 793.405771] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Traceback (most recent call last): [ 793.405771] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 793.405771] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] yield [ 793.405771] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 793.405771] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] self.set_inventory_for_provider( [ 793.405771] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 793.405771] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 793.406017] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2cc171e0-67c5-4d1d-bf2a-7c157c7b7f80"}]} [ 793.406017] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] [ 793.406017] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] During handling of the above exception, another exception occurred: [ 793.406017] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] [ 793.406017] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Traceback (most recent call last): [ 793.406017] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 793.406017] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] self._delete_instance(context, instance, bdms) [ 793.406017] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 793.406017] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] self._complete_deletion(context, instance) [ 793.406292] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 793.406292] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] self._update_resource_tracker(context, instance) [ 793.406292] env[69367]: ERROR nova.compute.manager [instance: 
e1c7d100-4ad7-4871-970f-bb7562bfc6fc] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 793.406292] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] self.rt.update_usage(context, instance, instance.node) [ 793.406292] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 793.406292] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] return f(*args, **kwargs) [ 793.406292] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 793.406292] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] self._update(context.elevated(), self.compute_nodes[nodename]) [ 793.406292] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 793.406292] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] self._update_to_placement(context, compute_node, startup) [ 793.406292] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 793.406292] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 793.406604] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 793.406604] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] return attempt.get(self._wrap_exception) [ 793.406604] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 793.406604] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] six.reraise(self.value[0], self.value[1], self.value[2]) [ 793.406604] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 793.406604] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] raise value [ 793.406604] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 793.406604] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 793.406604] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 793.406604] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] self.reportclient.update_from_provider_tree( [ 793.406604] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 793.406604] 
env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] with catch_all(pd.uuid): [ 793.406604] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 793.407550] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] self.gen.throw(typ, value, traceback) [ 793.407550] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 793.407550] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] raise exception.ResourceProviderSyncFailed() [ 793.407550] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 793.407550] env[69367]: ERROR nova.compute.manager [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] [ 793.408678] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.148s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.409450] env[69367]: INFO nova.compute.claims [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 793.500671] env[69367]: DEBUG nova.network.neutron [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.889867] env[69367]: DEBUG nova.compute.manager [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] [instance: 484ce161-5686-4573-8eed-4ebb3505e843] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 793.914297] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Lock "e1c7d100-4ad7-4871-970f-bb7562bfc6fc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.404s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 794.004622] env[69367]: INFO nova.compute.manager [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] [instance: 8ee84a56-cc49-4056-b561-aa1f2b10a06c] Took 1.03 seconds to deallocate network for instance. 
[ 794.419681] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.448701] env[69367]: DEBUG nova.scheduler.client.report [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 794.463645] env[69367]: DEBUG nova.scheduler.client.report [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 794.463887] env[69367]: DEBUG nova.compute.provider_tree [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 794.476031] env[69367]: DEBUG nova.scheduler.client.report [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 794.496934] env[69367]: DEBUG nova.scheduler.client.report [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 794.919761] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a5d45bf-2abd-4d3c-8a6e-7ee6b5d6bbe6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.933712] env[69367]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b62db4-e0b7-404f-803e-d6c7c2e88eb8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.976135] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6030d61-fb5d-44af-8e59-ab64c328c5be {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.986567] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe24fe9-f62b-4eb8-af44-ac01634c32c7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.004465] env[69367]: DEBUG nova.compute.provider_tree [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 795.034649] env[69367]: INFO nova.scheduler.client.report [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Deleted allocations for instance 8ee84a56-cc49-4056-b561-aa1f2b10a06c [ 795.247580] env[69367]: DEBUG oslo_concurrency.lockutils [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Acquiring lock "c272b0ae-6313-46ab-977c-6de255e77675" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.248827] env[69367]: DEBUG oslo_concurrency.lockutils [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Lock "c272b0ae-6313-46ab-977c-6de255e77675" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.445340] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.531023] env[69367]: ERROR nova.scheduler.client.report [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [req-230a0a85-3cdd-4c60-8a2f-1c0187015249] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-230a0a85-3cdd-4c60-8a2f-1c0187015249"}]} [ 795.531023] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.121s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.531384] env[69367]: ERROR nova.compute.manager [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 795.531384] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] Traceback (most recent call last): [ 795.531384] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 795.531384] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] yield [ 795.531384] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 795.531384] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] self.set_inventory_for_provider( [ 795.531384] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 795.531384] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 795.531640] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On 
instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-230a0a85-3cdd-4c60-8a2f-1c0187015249"}]} [ 795.531640] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] [ 795.531640] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] During handling of the above exception, another exception occurred: [ 795.531640] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] [ 795.531640] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] Traceback (most recent call last): [ 795.531640] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 795.531640] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] with self.rt.instance_claim(context, instance, node, allocs, [ 795.531640] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 795.531640] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] return f(*args, **kwargs) [ 795.531960] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 795.531960] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] self._update(elevated, cn) [ 795.531960] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 795.531960] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] self._update_to_placement(context, compute_node, startup) [ 795.531960] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 795.531960] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 795.531960] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 795.531960] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] return attempt.get(self._wrap_exception) [ 795.531960] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 795.531960] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] six.reraise(self.value[0], self.value[1], self.value[2]) [ 795.531960] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 795.531960] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] raise value [ 795.531960] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 795.532404] env[69367]: 
ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 795.532404] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 795.532404] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] self.reportclient.update_from_provider_tree( [ 795.532404] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 795.532404] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] with catch_all(pd.uuid): [ 795.532404] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 795.532404] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] self.gen.throw(typ, value, traceback) [ 795.532404] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 795.532404] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] raise exception.ResourceProviderSyncFailed() [ 795.532404] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 795.532404] env[69367]: ERROR nova.compute.manager [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] [ 795.532826] env[69367]: DEBUG nova.compute.utils [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 795.535034] env[69367]: DEBUG oslo_concurrency.lockutils [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.865s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.535770] env[69367]: INFO nova.compute.claims [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 795.538271] env[69367]: DEBUG nova.compute.manager [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] Build of instance d7009e78-b9f4-47e8-ba29-dfc710bef8ad was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 795.539062] env[69367]: DEBUG nova.compute.manager [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 795.539062] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Acquiring lock "refresh_cache-d7009e78-b9f4-47e8-ba29-dfc710bef8ad" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.539174] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Acquired lock "refresh_cache-d7009e78-b9f4-47e8-ba29-dfc710bef8ad" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.539236] env[69367]: DEBUG nova.network.neutron [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 795.548256] env[69367]: DEBUG oslo_concurrency.lockutils [None req-515f9e32-548d-413f-84ea-987c928671cb tempest-ListServerFiltersTestJSON-983988626 tempest-ListServerFiltersTestJSON-983988626-project-member] Lock "8ee84a56-cc49-4056-b561-aa1f2b10a06c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.659s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 796.050640] env[69367]: DEBUG nova.compute.manager [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: bb59f765-0d86-4803-845c-8186e9341702] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 796.066612] env[69367]: DEBUG nova.network.neutron [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 796.169635] env[69367]: DEBUG nova.network.neutron [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.577421] env[69367]: DEBUG nova.scheduler.client.report [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 796.582829] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 796.597291] env[69367]: DEBUG nova.scheduler.client.report [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 796.597503] env[69367]: DEBUG nova.compute.provider_tree [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 796.616796] env[69367]: DEBUG nova.scheduler.client.report [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 796.636532] env[69367]: DEBUG nova.scheduler.client.report [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: 
HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 796.676236] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Releasing lock "refresh_cache-d7009e78-b9f4-47e8-ba29-dfc710bef8ad" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 796.677031] env[69367]: DEBUG nova.compute.manager [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 796.677031] env[69367]: DEBUG nova.compute.manager [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 796.677031] env[69367]: DEBUG nova.network.neutron [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 796.696397] env[69367]: DEBUG nova.network.neutron [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 797.082060] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4a9d2ca-d7cc-4bc0-9105-4c89f75a60aa {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.091401] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ef9aa8-11b1-4e81-8c83-637ca2f5a8f5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.131929] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b938ebda-635a-469d-b435-bf48488bc1e4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.140616] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-350ab145-105a-427f-b508-87871e74678b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.157849] env[69367]: DEBUG nova.compute.provider_tree [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 797.200086] env[69367]: DEBUG nova.network.neutron [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.683366] env[69367]: ERROR nova.scheduler.client.report [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] [req-f6e0c590-acca-487e-ad50-29b5d04df0d8] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-f6e0c590-acca-487e-ad50-29b5d04df0d8"}]} [ 797.683860] env[69367]: DEBUG oslo_concurrency.lockutils [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.150s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 797.684557] env[69367]: ERROR nova.compute.manager [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 797.684557] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] Traceback (most recent call last): [ 797.684557] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 797.684557] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] yield [ 797.684557] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 797.684557] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] self.set_inventory_for_provider( [ 797.684557] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 797.684557] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 797.684813] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-f6e0c590-acca-487e-ad50-29b5d04df0d8"}]} [ 797.684813] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] [ 797.684813] env[69367]: ERROR nova.compute.manager [instance: 
f0522b69-b593-404e-8f24-b6c5c6c8b2e4] During handling of the above exception, another exception occurred: [ 797.684813] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] [ 797.684813] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] Traceback (most recent call last): [ 797.684813] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 797.684813] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] with self.rt.instance_claim(context, instance, node, allocs, [ 797.684813] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 797.684813] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] return f(*args, **kwargs) [ 797.685123] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 797.685123] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] self._update(elevated, cn) [ 797.685123] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 797.685123] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] self._update_to_placement(context, compute_node, startup) [ 797.685123] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 797.685123] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 797.685123] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 797.685123] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] return attempt.get(self._wrap_exception) [ 797.685123] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 797.685123] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] six.reraise(self.value[0], self.value[1], self.value[2]) [ 797.685123] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 797.685123] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] raise value [ 797.685123] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 797.685520] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 797.685520] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in 
_update_to_placement [ 797.685520] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] self.reportclient.update_from_provider_tree( [ 797.685520] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 797.685520] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] with catch_all(pd.uuid): [ 797.685520] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 797.685520] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] self.gen.throw(typ, value, traceback) [ 797.685520] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 797.685520] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] raise exception.ResourceProviderSyncFailed() [ 797.685520] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 797.685520] env[69367]: ERROR nova.compute.manager [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] [ 797.685829] env[69367]: DEBUG nova.compute.utils [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 797.687811] env[69367]: DEBUG nova.compute.manager [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] Build of instance f0522b69-b593-404e-8f24-b6c5c6c8b2e4 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 797.689149] env[69367]: DEBUG nova.compute.manager [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 797.689149] env[69367]: DEBUG oslo_concurrency.lockutils [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Acquiring lock "refresh_cache-f0522b69-b593-404e-8f24-b6c5c6c8b2e4" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.689149] env[69367]: DEBUG oslo_concurrency.lockutils [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Acquired lock "refresh_cache-f0522b69-b593-404e-8f24-b6c5c6c8b2e4" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 797.689149] env[69367]: DEBUG nova.network.neutron [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 797.690476] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a1156e1e-41ad-40b1-a520-164694ed7948 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.356s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 797.690690] env[69367]: DEBUG nova.objects.instance [None req-a1156e1e-41ad-40b1-a520-164694ed7948 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lazy-loading 'pci_requests' on Instance uuid 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 797.706034] env[69367]: INFO nova.compute.manager [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] [instance: d7009e78-b9f4-47e8-ba29-dfc710bef8ad] Took 1.03 seconds to deallocate network for instance. [ 798.197036] env[69367]: DEBUG nova.objects.instance [None req-a1156e1e-41ad-40b1-a520-164694ed7948 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lazy-loading 'numa_topology' on Instance uuid 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 798.229944] env[69367]: DEBUG nova.network.neutron [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 798.296618] env[69367]: DEBUG nova.network.neutron [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.701066] env[69367]: INFO nova.compute.claims [None req-a1156e1e-41ad-40b1-a520-164694ed7948 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 798.747433] env[69367]: INFO nova.scheduler.client.report [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Deleted allocations for instance d7009e78-b9f4-47e8-ba29-dfc710bef8ad [ 798.800259] env[69367]: DEBUG oslo_concurrency.lockutils [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Releasing lock "refresh_cache-f0522b69-b593-404e-8f24-b6c5c6c8b2e4" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 798.800525] env[69367]: DEBUG nova.compute.manager [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 798.800710] env[69367]: DEBUG nova.compute.manager [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] [instance: f0522b69-b593-404e-8f24-b6c5c6c8b2e4] Skipping network deallocation for instance since networking was not requested. 
{{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 799.236031] env[69367]: DEBUG nova.scheduler.client.report [None req-a1156e1e-41ad-40b1-a520-164694ed7948 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 799.263329] env[69367]: DEBUG nova.scheduler.client.report [None req-a1156e1e-41ad-40b1-a520-164694ed7948 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 799.263554] env[69367]: DEBUG nova.compute.provider_tree [None req-a1156e1e-41ad-40b1-a520-164694ed7948 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 799.266381] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a085c230-35c0-4225-8a3b-f135e0165425 tempest-VolumesAdminNegativeTest-6889650 tempest-VolumesAdminNegativeTest-6889650-project-member] Lock "d7009e78-b9f4-47e8-ba29-dfc710bef8ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.813s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 799.283067] env[69367]: DEBUG nova.scheduler.client.report [None req-a1156e1e-41ad-40b1-a520-164694ed7948 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 799.304678] env[69367]: DEBUG nova.scheduler.client.report [None req-a1156e1e-41ad-40b1-a520-164694ed7948 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 799.769496] env[69367]: DEBUG nova.compute.manager [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f 
tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 799.825863] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-431ceb11-cc7c-42b3-9505-1c1305a8a58c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.837529] env[69367]: INFO nova.scheduler.client.report [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Deleted allocations for instance f0522b69-b593-404e-8f24-b6c5c6c8b2e4 [ 799.848938] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e7bc920-d7f0-4f35-93be-776e7b450513 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.885880] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02862e23-5776-444e-b508-5b315e9c9955 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.894491] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df2805fa-61af-456d-bd2a-b4ef42b601ec {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.910513] env[69367]: DEBUG nova.compute.provider_tree [None req-a1156e1e-41ad-40b1-a520-164694ed7948 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 800.307648] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.348489] env[69367]: DEBUG oslo_concurrency.lockutils [None req-71e59610-5b9a-47a8-a1c2-ad48771fbdde tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Lock "f0522b69-b593-404e-8f24-b6c5c6c8b2e4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.728s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.440164] env[69367]: ERROR nova.scheduler.client.report [None req-a1156e1e-41ad-40b1-a520-164694ed7948 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [req-f5a92b48-5dd4-46fc-9635-2c0a10ca63c8] Failed to update inventory to [{'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-f5a92b48-5dd4-46fc-9635-2c0a10ca63c8"}]} [ 800.444065] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a1156e1e-41ad-40b1-a520-164694ed7948 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.751s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.444065] env[69367]: ERROR nova.compute.manager [None req-a1156e1e-41ad-40b1-a520-164694ed7948 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Instance failed to spawn: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
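
Every 400 response in this log has the same cause: the compute host reports a DISK_GB inventory whose max_unit is 0, while the placement schema quoted in the error detail requires max_unit to be an integer of at least 1. The following is a minimal illustration, not part of the log: the schema fragment and the offending payload are copied from the error text above, and the jsonschema library is used only to reproduce the same validation failure locally.

# Illustration only: validate the DISK_GB inventory reported above against the
# max_unit constraint quoted in the placement 400 response. Requires `jsonschema`.
import jsonschema

# Schema fragment copied from the error detail:
# max_unit must be an integer in [1, 2147483647].
inventory_schema = {
    "type": "object",
    "properties": {
        "inventories": {
            "type": "object",
            "patternProperties": {
                "^[A-Z0-9_]+$": {
                    "type": "object",
                    "properties": {
                        "max_unit": {
                            "type": "integer",
                            "maximum": 2147483647,
                            "minimum": 1,
                        },
                    },
                },
            },
        },
    },
}

# Payload as reported by the resource tracker (DISK_GB max_unit is 0).
payload = {
    "inventories": {
        "DISK_GB": {
            "total": 400, "reserved": 0, "min_unit": 1,
            "max_unit": 0, "step_size": 1, "allocation_ratio": 1.0,
        },
    },
}

try:
    jsonschema.validate(payload, inventory_schema)
except jsonschema.ValidationError as exc:
    # Prints: "0 is less than the minimum of 1" for inventories/DISK_GB/max_unit,
    # matching the detail string returned by placement above.
    print(exc.message, list(exc.absolute_path))
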
[ 800.444065] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Traceback (most recent call last): [ 800.444065] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 800.444065] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] yield [ 800.444065] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 800.444065] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] self.set_inventory_for_provider( [ 800.444065] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 800.444065] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 800.444560] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-f5a92b48-5dd4-46fc-9635-2c0a10ca63c8"}]} [ 800.444560] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] [ 800.444560] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] During handling of the above exception, another exception occurred: [ 800.444560] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] [ 800.444560] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Traceback (most recent call last): [ 800.444560] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] File "/opt/stack/nova/nova/compute/manager.py", line 7682, in _unshelve_instance [ 800.444560] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] with self.rt.instance_claim(context, instance, node, allocations, [ 800.444560] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 800.444560] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] return f(*args, **kwargs) [ 800.444849] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 800.444849] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] self._update(elevated, cn) [ 800.444849] env[69367]: ERROR nova.compute.manager [instance: 
8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 800.444849] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] self._update_to_placement(context, compute_node, startup) [ 800.444849] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 800.444849] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 800.444849] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 800.444849] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] return attempt.get(self._wrap_exception) [ 800.444849] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 800.444849] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] six.reraise(self.value[0], self.value[1], self.value[2]) [ 800.444849] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 800.444849] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] raise value [ 800.444849] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 800.445248] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 800.445248] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 800.445248] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] self.reportclient.update_from_provider_tree( [ 800.445248] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 800.445248] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] with catch_all(pd.uuid): [ 800.445248] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 800.445248] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] self.gen.throw(typ, value, traceback) [ 800.445248] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 800.445248] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] raise exception.ResourceProviderSyncFailed() [ 800.445248] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
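
The tracebacks all show the same two-layer failure: the inner PUT to /resource_providers/<uuid>/inventories raises ResourceProviderUpdateFailed with the full 400 body, and a catch-all context manager in the report client translates it into the generic ResourceProviderSyncFailed that the resource tracker's retry wrapper re-raises, which is why every instance claim surfaces only the generic "Failed to synchronize" message. Below is a minimal sketch of that exception-translation pattern as it appears in the traceback; the class and function bodies are simplified stand-ins, not Nova's actual implementation.

# Sketch of the contextmanager-based exception translation visible in the
# traceback (the catch_all frame in report.py): any failure inside the block
# is logged and re-raised as one generic "sync failed" error. Names and
# bodies here are illustrative stand-ins only.
import contextlib
import logging

LOG = logging.getLogger(__name__)


class ResourceProviderUpdateFailed(Exception):
    """Stand-in: a single placement API call returned an error."""


class ResourceProviderSyncFailed(Exception):
    """Stand-in: the overall provider sync failed."""


@contextlib.contextmanager
def catch_all(provider_uuid):
    # Exceptions raised inside `with catch_all(uuid):` propagate into the
    # generator at the yield point (via contextlib __exit__ / gen.throw),
    # are logged with the provider UUID, and are replaced by a single
    # generic exception type for the caller to handle.
    try:
        yield
    except ResourceProviderUpdateFailed:
        LOG.exception("Failed to sync provider %s with placement", provider_uuid)
        raise ResourceProviderSyncFailed()


def set_inventory_for_provider(inventories):
    # Stand-in for the real PUT to /resource_providers/<uuid>/inventories:
    # mimic placement rejecting max_unit < 1 with a 400.
    for rc, inv in inventories.items():
        if inv["max_unit"] < 1:
            raise ResourceProviderUpdateFailed(
                f"{rc} max_unit {inv['max_unit']} is below the schema minimum of 1")


if __name__ == "__main__":
    bad_inventory = {"DISK_GB": {"total": 400, "max_unit": 0}}
    try:
        with catch_all("19ddf8be-7305-4f70-8366-52a9957232e6"):
            set_inventory_for_provider(bad_inventory)
    except ResourceProviderSyncFailed:
        print("sync failed, as reported for each instance claim in the log above")
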
[ 800.445248] env[69367]: ERROR nova.compute.manager [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] [ 800.449273] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.157s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.449654] env[69367]: INFO nova.compute.claims [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 800.482549] env[69367]: INFO nova.scheduler.client.report [None req-a1156e1e-41ad-40b1-a520-164694ed7948 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Deleted allocations for instance 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa [ 800.851940] env[69367]: DEBUG nova.compute.manager [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 801.077517] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "4a46d003-f57e-4089-aa60-757a4246f071" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.077645] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "4a46d003-f57e-4089-aa60-757a4246f071" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 801.119031] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "46b6bc45-57f0-4850-9249-6bbb22b162c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.119368] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "46b6bc45-57f0-4850-9249-6bbb22b162c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 801.166801] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 
tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "54a1f586-481d-427e-ba0b-be90e5573bd3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.167325] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "54a1f586-481d-427e-ba0b-be90e5573bd3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 801.313481] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Acquiring lock "d900df05-b65c-4a45-94d1-563afbf9c022" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.313733] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Lock "d900df05-b65c-4a45-94d1-563afbf9c022" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 801.381229] env[69367]: DEBUG oslo_concurrency.lockutils [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.479267] env[69367]: DEBUG nova.scheduler.client.report [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 801.504185] env[69367]: DEBUG nova.scheduler.client.report [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 801.504185] env[69367]: DEBUG nova.compute.provider_tree [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 
tempest-ImagesOneServerTestJSON-993522534-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 801.521589] env[69367]: DEBUG nova.scheduler.client.report [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 801.547033] env[69367]: DEBUG nova.scheduler.client.report [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 801.659851] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a1156e1e-41ad-40b1-a520-164694ed7948 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 29.675s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.048253] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-065e17c9-d61c-4b65-83fc-d6cf5045be28 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.058329] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db15a0a8-d262-4647-b6a9-962e4bbb1341 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.094857] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8b45791-7f16-45e8-a61e-a90f9c398e10 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.103729] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6d302c-78f6-4503-b475-f0548e8dc916 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.118569] env[69367]: DEBUG nova.compute.provider_tree [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 802.258424] env[69367]: DEBUG oslo_concurrency.lockutils [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "interface-837b4093-308b-440b-940d-fc0227a5c590-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 802.258703] env[69367]: DEBUG oslo_concurrency.lockutils [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "interface-837b4093-308b-440b-940d-fc0227a5c590-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 802.259078] env[69367]: DEBUG nova.objects.instance [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lazy-loading 'flavor' on Instance uuid 837b4093-308b-440b-940d-fc0227a5c590 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 802.655452] env[69367]: ERROR nova.scheduler.client.report [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] [req-b4c38ed1-57ec-4bfc-803f-0a2104fc5aa7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-b4c38ed1-57ec-4bfc-803f-0a2104fc5aa7"}]} [ 802.655821] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.208s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.656446] env[69367]: ERROR nova.compute.manager [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 802.656446] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] Traceback (most recent call last): [ 802.656446] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 802.656446] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] yield [ 802.656446] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 802.656446] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] self.set_inventory_for_provider( [ 802.656446] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 802.656446] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 802.658062] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-b4c38ed1-57ec-4bfc-803f-0a2104fc5aa7"}]} [ 802.658062] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] [ 802.658062] env[69367]: ERROR nova.compute.manager [instance: 
3f6a67a9-08db-4a15-ae07-bef02b9a6d48] During handling of the above exception, another exception occurred: [ 802.658062] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] [ 802.658062] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] Traceback (most recent call last): [ 802.658062] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 802.658062] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] with self.rt.instance_claim(context, instance, node, allocs, [ 802.658062] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 802.658062] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] return f(*args, **kwargs) [ 802.658444] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 802.658444] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] self._update(elevated, cn) [ 802.658444] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 802.658444] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] self._update_to_placement(context, compute_node, startup) [ 802.658444] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 802.658444] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 802.658444] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 802.658444] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] return attempt.get(self._wrap_exception) [ 802.658444] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 802.658444] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] six.reraise(self.value[0], self.value[1], self.value[2]) [ 802.658444] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 802.658444] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] raise value [ 802.658444] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 802.658798] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 802.658798] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in 
_update_to_placement [ 802.658798] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] self.reportclient.update_from_provider_tree( [ 802.658798] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 802.658798] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] with catch_all(pd.uuid): [ 802.658798] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 802.658798] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] self.gen.throw(typ, value, traceback) [ 802.658798] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 802.658798] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] raise exception.ResourceProviderSyncFailed() [ 802.658798] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 802.658798] env[69367]: ERROR nova.compute.manager [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] [ 802.659127] env[69367]: DEBUG nova.compute.utils [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 802.659127] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.321s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 802.660781] env[69367]: INFO nova.compute.claims [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 802.662870] env[69367]: DEBUG nova.compute.manager [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] Build of instance 3f6a67a9-08db-4a15-ae07-bef02b9a6d48 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 802.663302] env[69367]: DEBUG nova.compute.manager [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 802.663756] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] Acquiring lock "refresh_cache-3f6a67a9-08db-4a15-ae07-bef02b9a6d48" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.663756] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] Acquired lock "refresh_cache-3f6a67a9-08db-4a15-ae07-bef02b9a6d48" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 802.663858] env[69367]: DEBUG nova.network.neutron [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 802.868077] env[69367]: DEBUG nova.objects.instance [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lazy-loading 'pci_requests' on Instance uuid 837b4093-308b-440b-940d-fc0227a5c590 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 803.174451] env[69367]: INFO nova.compute.manager [None req-a1156e1e-41ad-40b1-a520-164694ed7948 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 8a9bdd5c-b2f2-4b0a-9e9d-ac8984e8a5fa] Successfully reverted task state from spawning on failure for instance. [ 803.181200] env[69367]: ERROR oslo_messaging.rpc.server [None req-a1156e1e-41ad-40b1-a520-164694ed7948 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
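The 400 bodies above pinpoint the root cause: the compute node is reporting a DISK_GB inventory record with max_unit = 0, while the placement API schema requires max_unit >= 1, so every inventory PUT for provider 19ddf8be-7305-4f70-8366-52a9957232e6 is rejected and the resource tracker raises ResourceProviderSyncFailed. The validation failure can be reproduced outside Nova with the minimal sketch below, assuming the python-jsonschema package is available; the schema fragment and the DISK_GB record are copied verbatim from the error detail in the log.

    # Minimal reproduction of the validation error quoted in the 400 response.
    import jsonschema

    # max_unit schema fragment, exactly as quoted by placement above
    MAX_UNIT_SCHEMA = {'type': 'integer', 'maximum': 2147483647, 'minimum': 1}

    # DISK_GB inventory record from the failed update
    disk_gb = {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0,
               'step_size': 1, 'allocation_ratio': 1.0}

    try:
        jsonschema.validate(disk_gb['max_unit'], MAX_UNIT_SCHEMA)
    except jsonschema.ValidationError as exc:
        print(exc.message)   # prints: 0 is less than the minimum of 1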
[ 803.181200] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 803.181200] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 803.181200] env[69367]: ERROR oslo_messaging.rpc.server yield [ 803.181200] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 803.181200] env[69367]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 803.181200] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 803.181200] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 803.181200] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-f5a92b48-5dd4-46fc-9635-2c0a10ca63c8"}]} [ 803.181200] env[69367]: ERROR oslo_messaging.rpc.server [ 803.181686] env[69367]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 803.181686] env[69367]: ERROR oslo_messaging.rpc.server [ 803.181686] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 803.181686] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 803.181686] env[69367]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 803.181686] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 803.181686] env[69367]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 803.181686] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 803.181686] env[69367]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 803.181686] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 803.181686] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 803.181686] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 803.181686] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 803.181686] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 803.181686] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 803.181686] env[69367]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 803.181686] env[69367]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 803.181686] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 803.182271] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 803.182271] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 803.182271] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 803.182271] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 803.182271] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 803.182271] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 803.182271] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 803.182271] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 803.182271] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 803.182271] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 803.182271] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 803.182271] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 803.182271] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 803.182271] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 803.182271] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 803.182271] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 803.182271] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 803.182271] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 7610, in unshelve_instance [ 803.182819] env[69367]: ERROR oslo_messaging.rpc.server do_unshelve_instance() [ 803.182819] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 803.182819] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 803.182819] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 7607, in do_unshelve_instance [ 803.182819] env[69367]: ERROR oslo_messaging.rpc.server self._unshelve_instance( [ 803.182819] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 7698, in _unshelve_instance [ 803.182819] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(logger=LOG): [ 803.182819] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 803.182819] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 803.182819] env[69367]: ERROR 
oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 803.182819] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 803.182819] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 7682, in _unshelve_instance [ 803.182819] env[69367]: ERROR oslo_messaging.rpc.server with self.rt.instance_claim(context, instance, node, allocations, [ 803.182819] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 803.182819] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 803.182819] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 803.182819] env[69367]: ERROR oslo_messaging.rpc.server self._update(elevated, cn) [ 803.182819] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 803.183352] env[69367]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 803.183352] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 803.183352] env[69367]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 803.183352] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 803.183352] env[69367]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 803.183352] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 803.183352] env[69367]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 803.183352] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 803.183352] env[69367]: ERROR oslo_messaging.rpc.server raise value [ 803.183352] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 803.183352] env[69367]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 803.183352] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 803.183352] env[69367]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 803.183352] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 803.183352] env[69367]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 803.183352] env[69367]: ERROR oslo_messaging.rpc.server File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 803.183352] env[69367]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 803.183352] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 803.183824] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 803.183824] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource 
provider information supplied by the compute host. [ 803.183824] env[69367]: ERROR oslo_messaging.rpc.server [ 803.196287] env[69367]: DEBUG nova.network.neutron [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 803.320178] env[69367]: DEBUG nova.network.neutron [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.371310] env[69367]: DEBUG nova.objects.base [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Object Instance<837b4093-308b-440b-940d-fc0227a5c590> lazy-loaded attributes: flavor,pci_requests {{(pid=69367) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 803.371541] env[69367]: DEBUG nova.network.neutron [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 803.435611] env[69367]: DEBUG nova.policy [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5360c8b93a954bd0832ebadea6983ef1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f5bc3d470905412ea72a8eedb98e9e47', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 803.691200] env[69367]: DEBUG nova.scheduler.client.report [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 803.716253] env[69367]: DEBUG nova.scheduler.client.report [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 803.716559] env[69367]: DEBUG nova.compute.provider_tree [None 
req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 803.728958] env[69367]: DEBUG nova.scheduler.client.report [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 803.751024] env[69367]: DEBUG nova.network.neutron [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Successfully created port: adb5a8dc-4297-4171-b593-e1230f1746c7 {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 803.753959] env[69367]: DEBUG nova.scheduler.client.report [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 803.823266] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] Releasing lock "refresh_cache-3f6a67a9-08db-4a15-ae07-bef02b9a6d48" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 803.824833] env[69367]: DEBUG nova.compute.manager [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 803.824833] env[69367]: DEBUG nova.compute.manager [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 803.824833] env[69367]: DEBUG nova.network.neutron [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 803.846643] env[69367]: DEBUG nova.network.neutron [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 804.243485] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a759fb-fc44-4043-9a69-2fae237b4a6c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.252394] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53a12c1-80eb-40c5-a313-57d75175f841 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.282703] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac9e5c61-3de4-4b6d-8bc4-a36befeb7ef3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.290631] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6419402-4d2f-4382-bd51-c562499c0fa6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.312092] env[69367]: DEBUG nova.compute.provider_tree [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 804.350802] env[69367]: DEBUG nova.network.neutron [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.833302] env[69367]: ERROR nova.scheduler.client.report [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 
tempest-ServerShowV247Test-1137483895-project-member] [req-1c61da85-7b22-4b64-9f2a-cbbe01cd0026] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-1c61da85-7b22-4b64-9f2a-cbbe01cd0026"}]} [ 804.833737] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.175s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 804.834352] env[69367]: ERROR nova.compute.manager [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
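Each new claim against this node fails the same way because the resource tracker resubmits the same inventory (DISK_GB max_unit = 0) on every _update_to_placement call. As a purely hypothetical guard, not the actual Nova fix, one could drop or clamp any resource class whose max_unit would fail placement validation before the PUT is issued; the sketch below uses the inventory values from the log to illustrate the invariant placement enforces.

    # Hypothetical pre-submission guard (illustration only, not Nova code).
    def sanitize_inventory(inventory):
        """Keep only resource classes whose max_unit satisfies placement's minimum of 1."""
        return {rc: rec for rc, rec in inventory.items()
                if rec.get('max_unit', 1) >= 1}

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    print(sorted(sanitize_inventory(inventory)))   # ['MEMORY_MB', 'VCPU'] -- DISK_GB dropped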
[ 804.834352] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] Traceback (most recent call last): [ 804.834352] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 804.834352] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] yield [ 804.834352] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 804.834352] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] self.set_inventory_for_provider( [ 804.834352] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 804.834352] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 804.834666] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-1c61da85-7b22-4b64-9f2a-cbbe01cd0026"}]} [ 804.834666] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] [ 804.834666] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] During handling of the above exception, another exception occurred: [ 804.834666] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] [ 804.834666] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] Traceback (most recent call last): [ 804.834666] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 804.834666] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] with self.rt.instance_claim(context, instance, node, allocs, [ 804.834666] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 804.834666] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] return f(*args, **kwargs) [ 804.835167] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 804.835167] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] self._update(elevated, cn) [ 804.835167] env[69367]: ERROR nova.compute.manager [instance: 
17ffa2b1-4a0a-4e14-a7b0-104791adf072] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 804.835167] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] self._update_to_placement(context, compute_node, startup) [ 804.835167] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 804.835167] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 804.835167] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 804.835167] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] return attempt.get(self._wrap_exception) [ 804.835167] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 804.835167] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] six.reraise(self.value[0], self.value[1], self.value[2]) [ 804.835167] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 804.835167] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] raise value [ 804.835167] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 804.835672] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 804.835672] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 804.835672] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] self.reportclient.update_from_provider_tree( [ 804.835672] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 804.835672] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] with catch_all(pd.uuid): [ 804.835672] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 804.835672] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] self.gen.throw(typ, value, traceback) [ 804.835672] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 804.835672] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] raise exception.ResourceProviderSyncFailed() [ 804.835672] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 804.835672] env[69367]: ERROR nova.compute.manager [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] [ 804.836036] env[69367]: DEBUG nova.compute.utils [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 804.836275] env[69367]: DEBUG oslo_concurrency.lockutils [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.521s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 804.836467] env[69367]: DEBUG oslo_concurrency.lockutils [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 804.836649] env[69367]: INFO nova.compute.manager [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] [instance: 011ab7de-98a7-41fc-9e05-e71965c73c09] Successfully reverted task state from None on failure for instance. [ 804.838745] env[69367]: DEBUG oslo_concurrency.lockutils [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.423s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 804.840143] env[69367]: INFO nova.compute.claims [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 804.842926] env[69367]: DEBUG nova.compute.manager [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] Build of instance 17ffa2b1-4a0a-4e14-a7b0-104791adf072 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 804.843404] env[69367]: DEBUG nova.compute.manager [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 804.843630] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Acquiring lock "refresh_cache-17ffa2b1-4a0a-4e14-a7b0-104791adf072" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.843779] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Acquired lock "refresh_cache-17ffa2b1-4a0a-4e14-a7b0-104791adf072" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 804.843938] env[69367]: DEBUG nova.network.neutron [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 804.845478] env[69367]: ERROR oslo_messaging.rpc.server [None req-320c0afb-579c-4ab6-a9bf-6526f9c412f0 tempest-ServersListShow296Test-47020523 tempest-ServersListShow296Test-47020523-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
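For reference, set_inventory_for_provider ultimately issues a PUT against the URL named in the exception. The sketch below shows the shape of that request; the endpoint, token, and resource_provider_generation are placeholders that do not appear in the log, and a stale generation would produce a 409 rather than the 400 seen here.

    # Sketch of the inventory update request placement is rejecting (placeholder auth).
    import requests

    PLACEMENT_URL = "http://placement.example/placement"   # placeholder endpoint
    TOKEN = "..."                                           # placeholder auth token
    PROVIDER = "19ddf8be-7305-4f70-8366-52a9957232e6"

    body = {
        # must match the provider's current generation; placeholder value here
        "resource_provider_generation": 1,
        "inventories": {
            "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1,
                        "max_unit": 0, "step_size": 1, "allocation_ratio": 1.0},
        },
    }

    resp = requests.put(
        f"{PLACEMENT_URL}/resource_providers/{PROVIDER}/inventories",
        json=body,
        headers={"X-Auth-Token": TOKEN},
    )
    print(resp.status_code)   # 400: max_unit of 0 fails the schema, as in the log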
[ 804.845478] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 804.845478] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 804.845478] env[69367]: ERROR oslo_messaging.rpc.server yield [ 804.845478] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 804.845478] env[69367]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 804.845478] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 804.845478] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 804.845478] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-b0e674ac-8af2-41f2-af08-c740f6ac3853"}]} [ 804.845478] env[69367]: ERROR oslo_messaging.rpc.server [ 804.845822] env[69367]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 804.845822] env[69367]: ERROR oslo_messaging.rpc.server [ 804.845822] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 804.845822] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 804.845822] env[69367]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 804.845822] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 804.845822] env[69367]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 804.845822] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 804.845822] env[69367]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 804.845822] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 804.845822] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 804.845822] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 804.845822] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 804.845822] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 804.845822] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 804.845822] env[69367]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 804.845822] env[69367]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 804.845822] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 804.846395] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 804.846395] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 804.846395] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 804.846395] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 804.846395] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 804.846395] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 804.846395] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 804.846395] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 804.846395] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 804.846395] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 804.846395] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 804.846395] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 804.846395] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 804.846395] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 804.846395] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 804.846395] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 804.846395] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 804.846395] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 804.846922] env[69367]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 804.846922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 804.846922] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 804.846922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 804.846922] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 804.846922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 804.846922] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 804.846922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 804.846922] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 
804.846922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 804.846922] env[69367]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 804.846922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 804.846922] env[69367]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 804.846922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 804.846922] env[69367]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 804.846922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 804.846922] env[69367]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 804.846922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 804.847473] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 804.847473] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 804.847473] env[69367]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 804.847473] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 804.847473] env[69367]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 804.847473] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 804.847473] env[69367]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 804.847473] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 804.847473] env[69367]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 804.847473] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 804.847473] env[69367]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 804.847473] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 804.847473] env[69367]: ERROR oslo_messaging.rpc.server raise value [ 804.847473] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 804.847473] env[69367]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 804.847473] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 804.847473] env[69367]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 804.847935] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 804.847935] env[69367]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 804.847935] env[69367]: ERROR oslo_messaging.rpc.server File 
"/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 804.847935] env[69367]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 804.847935] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 804.847935] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 804.847935] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 804.847935] env[69367]: ERROR oslo_messaging.rpc.server [ 804.853054] env[69367]: INFO nova.compute.manager [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] [instance: 3f6a67a9-08db-4a15-ae07-bef02b9a6d48] Took 1.03 seconds to deallocate network for instance. [ 805.284310] env[69367]: DEBUG nova.compute.manager [req-ca57c723-e9d9-48ee-b002-608e1a19c06e req-88a6a495-7ace-42ce-b876-c9f93e42c0e1 service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Received event network-vif-plugged-adb5a8dc-4297-4171-b593-e1230f1746c7 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 805.284310] env[69367]: DEBUG oslo_concurrency.lockutils [req-ca57c723-e9d9-48ee-b002-608e1a19c06e req-88a6a495-7ace-42ce-b876-c9f93e42c0e1 service nova] Acquiring lock "837b4093-308b-440b-940d-fc0227a5c590-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.284310] env[69367]: DEBUG oslo_concurrency.lockutils [req-ca57c723-e9d9-48ee-b002-608e1a19c06e req-88a6a495-7ace-42ce-b876-c9f93e42c0e1 service nova] Lock "837b4093-308b-440b-940d-fc0227a5c590-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.284310] env[69367]: DEBUG oslo_concurrency.lockutils [req-ca57c723-e9d9-48ee-b002-608e1a19c06e req-88a6a495-7ace-42ce-b876-c9f93e42c0e1 service nova] Lock "837b4093-308b-440b-940d-fc0227a5c590-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.285071] env[69367]: DEBUG nova.compute.manager [req-ca57c723-e9d9-48ee-b002-608e1a19c06e req-88a6a495-7ace-42ce-b876-c9f93e42c0e1 service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] No waiting events found dispatching network-vif-plugged-adb5a8dc-4297-4171-b593-e1230f1746c7 {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 805.285381] env[69367]: WARNING nova.compute.manager [req-ca57c723-e9d9-48ee-b002-608e1a19c06e req-88a6a495-7ace-42ce-b876-c9f93e42c0e1 service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Received unexpected event network-vif-plugged-adb5a8dc-4297-4171-b593-e1230f1746c7 for instance with vm_state active and task_state None. [ 805.492140] env[69367]: DEBUG nova.network.neutron [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 805.585583] env[69367]: DEBUG nova.network.neutron [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.646583] env[69367]: DEBUG nova.network.neutron [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Successfully updated port: adb5a8dc-4297-4171-b593-e1230f1746c7 {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 805.874891] env[69367]: DEBUG nova.scheduler.client.report [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 805.890098] env[69367]: DEBUG nova.scheduler.client.report [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 805.890381] env[69367]: DEBUG nova.compute.provider_tree [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 805.893328] env[69367]: INFO nova.scheduler.client.report [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] Deleted allocations for instance 3f6a67a9-08db-4a15-ae07-bef02b9a6d48 [ 805.901394] env[69367]: DEBUG nova.scheduler.client.report [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 805.924274] env[69367]: DEBUG nova.scheduler.client.report [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b 
tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 806.087311] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Releasing lock "refresh_cache-17ffa2b1-4a0a-4e14-a7b0-104791adf072" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 806.088307] env[69367]: DEBUG nova.compute.manager [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 806.088307] env[69367]: DEBUG nova.compute.manager [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] [instance: 17ffa2b1-4a0a-4e14-a7b0-104791adf072] Skipping network deallocation for instance since networking was not requested. {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 806.148938] env[69367]: DEBUG oslo_concurrency.lockutils [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "refresh_cache-837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.149430] env[69367]: DEBUG oslo_concurrency.lockutils [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquired lock "refresh_cache-837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 806.149750] env[69367]: DEBUG nova.network.neutron [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 806.369584] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc37b74-b28d-4dd8-94f0-371f51ed8410 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.378047] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e73ece42-bd1b-42a2-92aa-666a69f1a6cd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.408113] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c3fd4c24-68da-4e55-bd81-27f04690e5d7 tempest-ImagesOneServerTestJSON-993522534 tempest-ImagesOneServerTestJSON-993522534-project-member] Lock "3f6a67a9-08db-4a15-ae07-bef02b9a6d48" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.664s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.410179] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a65f502-5ad5-4183-a89c-c3f8f4042873 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.418862] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829182d8-9449-4fd9-9ebf-667ff29b31ec {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.435900] env[69367]: DEBUG nova.compute.provider_tree [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 806.688119] env[69367]: WARNING nova.network.neutron [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] a8e5c1a6-2526-4042-8a8d-fdb54dbe7bf9 already exists in list: networks containing: ['a8e5c1a6-2526-4042-8a8d-fdb54dbe7bf9']. ignoring it [ 806.914217] env[69367]: DEBUG nova.compute.manager [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 806.967091] env[69367]: ERROR nova.scheduler.client.report [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [req-e7bc7112-d9ce-4933-8664-e20c14b5de79] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-e7bc7112-d9ce-4933-8664-e20c14b5de79"}]} [ 806.967501] env[69367]: DEBUG oslo_concurrency.lockutils [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.129s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.968127] env[69367]: ERROR nova.compute.manager [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 806.968127] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] Traceback (most recent call last): [ 806.968127] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 806.968127] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] yield [ 806.968127] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 806.968127] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] self.set_inventory_for_provider( [ 806.968127] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 806.968127] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 806.968739] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-e7bc7112-d9ce-4933-8664-e20c14b5de79"}]} [ 806.968739] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] [ 806.968739] env[69367]: ERROR nova.compute.manager [instance: 
dd598b7a-057f-48ea-a31e-96e7ccadeb3d] During handling of the above exception, another exception occurred: [ 806.968739] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] [ 806.968739] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] Traceback (most recent call last): [ 806.968739] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 806.968739] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] with self.rt.instance_claim(context, instance, node, allocs, [ 806.968739] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 806.968739] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] return f(*args, **kwargs) [ 806.969394] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 806.969394] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] self._update(elevated, cn) [ 806.969394] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 806.969394] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] self._update_to_placement(context, compute_node, startup) [ 806.969394] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 806.969394] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 806.969394] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 806.969394] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] return attempt.get(self._wrap_exception) [ 806.969394] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 806.969394] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] six.reraise(self.value[0], self.value[1], self.value[2]) [ 806.969394] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 806.969394] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] raise value [ 806.969394] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 806.969778] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 806.969778] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in 
_update_to_placement [ 806.969778] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] self.reportclient.update_from_provider_tree( [ 806.969778] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 806.969778] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] with catch_all(pd.uuid): [ 806.969778] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 806.969778] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] self.gen.throw(typ, value, traceback) [ 806.969778] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 806.969778] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] raise exception.ResourceProviderSyncFailed() [ 806.969778] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 806.969778] env[69367]: ERROR nova.compute.manager [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] [ 806.970112] env[69367]: DEBUG nova.compute.utils [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 806.970145] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.534s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 806.971611] env[69367]: INFO nova.compute.claims [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 806.975393] env[69367]: DEBUG nova.compute.manager [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] Build of instance dd598b7a-057f-48ea-a31e-96e7ccadeb3d was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 806.975947] env[69367]: DEBUG nova.compute.manager [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 806.976270] env[69367]: DEBUG oslo_concurrency.lockutils [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Acquiring lock "refresh_cache-dd598b7a-057f-48ea-a31e-96e7ccadeb3d" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.976514] env[69367]: DEBUG oslo_concurrency.lockutils [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Acquired lock "refresh_cache-dd598b7a-057f-48ea-a31e-96e7ccadeb3d" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 806.976765] env[69367]: DEBUG nova.network.neutron [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 807.016674] env[69367]: DEBUG nova.network.neutron [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Updating instance_info_cache with network_info: [{"id": "00ad3cfd-f282-442d-b152-85e841dd8a16", "address": "fa:16:3e:17:02:f1", "network": {"id": "a8e5c1a6-2526-4042-8a8d-fdb54dbe7bf9", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1429177141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5bc3d470905412ea72a8eedb98e9e47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00ad3cfd-f2", "ovs_interfaceid": "00ad3cfd-f282-442d-b152-85e841dd8a16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "adb5a8dc-4297-4171-b593-e1230f1746c7", "address": "fa:16:3e:54:ac:29", "network": {"id": "a8e5c1a6-2526-4042-8a8d-fdb54dbe7bf9", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1429177141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 
4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5bc3d470905412ea72a8eedb98e9e47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadb5a8dc-42", "ovs_interfaceid": "adb5a8dc-4297-4171-b593-e1230f1746c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.138663] env[69367]: INFO nova.scheduler.client.report [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Deleted allocations for instance 17ffa2b1-4a0a-4e14-a7b0-104791adf072 [ 807.436712] env[69367]: DEBUG oslo_concurrency.lockutils [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 807.500962] env[69367]: DEBUG nova.network.neutron [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 807.519835] env[69367]: DEBUG oslo_concurrency.lockutils [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Releasing lock "refresh_cache-837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 807.520519] env[69367]: DEBUG oslo_concurrency.lockutils [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.520691] env[69367]: DEBUG oslo_concurrency.lockutils [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquired lock "837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 807.523020] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cb471db-c942-4a32-bc1e-ffcfae7f752d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.544180] env[69367]: DEBUG nova.virt.hardware [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 807.544180] env[69367]: DEBUG nova.virt.hardware [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 807.544180] env[69367]: DEBUG nova.virt.hardware [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 807.544180] env[69367]: DEBUG nova.virt.hardware [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 807.544180] env[69367]: DEBUG nova.virt.hardware [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 807.544180] env[69367]: DEBUG nova.virt.hardware [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 807.544180] env[69367]: DEBUG nova.virt.hardware [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 807.544180] env[69367]: DEBUG nova.virt.hardware [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 807.544180] env[69367]: DEBUG nova.virt.hardware [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 807.544701] env[69367]: DEBUG nova.virt.hardware [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 807.545089] env[69367]: DEBUG nova.virt.hardware [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 807.551669] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Reconfiguring VM to attach interface {{(pid=69367) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 807.554461] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d5751ae-8751-4b8d-bed8-bbfcd75dc35d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.579095] env[69367]: DEBUG oslo_vmware.api [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Waiting for the task: (returnval){ [ 807.579095] env[69367]: value = "task-4234000" [ 807.579095] env[69367]: _type = "Task" [ 807.579095] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.590417] env[69367]: DEBUG oslo_vmware.api [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234000, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.618741] env[69367]: DEBUG nova.network.neutron [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.660199] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b883f15e-cdb3-40b5-b382-c4d5549547fd tempest-ServerShowV247Test-1137483895 tempest-ServerShowV247Test-1137483895-project-member] Lock "17ffa2b1-4a0a-4e14-a7b0-104791adf072" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.447s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 807.748718] env[69367]: DEBUG nova.compute.manager [req-fa8a26d6-23de-4ac7-9e28-d888f9a4cd20 req-368b2962-403d-4ae0-aece-6976ae0ee53a service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Received event network-changed-adb5a8dc-4297-4171-b593-e1230f1746c7 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 807.748862] env[69367]: DEBUG nova.compute.manager [req-fa8a26d6-23de-4ac7-9e28-d888f9a4cd20 req-368b2962-403d-4ae0-aece-6976ae0ee53a service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Refreshing instance network info cache due to event network-changed-adb5a8dc-4297-4171-b593-e1230f1746c7. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 807.749613] env[69367]: DEBUG oslo_concurrency.lockutils [req-fa8a26d6-23de-4ac7-9e28-d888f9a4cd20 req-368b2962-403d-4ae0-aece-6976ae0ee53a service nova] Acquiring lock "refresh_cache-837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.749613] env[69367]: DEBUG oslo_concurrency.lockutils [req-fa8a26d6-23de-4ac7-9e28-d888f9a4cd20 req-368b2962-403d-4ae0-aece-6976ae0ee53a service nova] Acquired lock "refresh_cache-837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 807.749613] env[69367]: DEBUG nova.network.neutron [req-fa8a26d6-23de-4ac7-9e28-d888f9a4cd20 req-368b2962-403d-4ae0-aece-6976ae0ee53a service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Refreshing network info cache for port adb5a8dc-4297-4171-b593-e1230f1746c7 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 808.013449] env[69367]: DEBUG nova.scheduler.client.report [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 808.041632] env[69367]: DEBUG nova.scheduler.client.report [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 808.041873] env[69367]: DEBUG nova.compute.provider_tree [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 808.065570] env[69367]: DEBUG nova.scheduler.client.report [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 808.092630] env[69367]: DEBUG oslo_vmware.api [None 
req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234000, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.093877] env[69367]: DEBUG nova.scheduler.client.report [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 808.122640] env[69367]: DEBUG oslo_concurrency.lockutils [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Releasing lock "refresh_cache-dd598b7a-057f-48ea-a31e-96e7ccadeb3d" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 808.123067] env[69367]: DEBUG nova.compute.manager [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 808.123223] env[69367]: DEBUG nova.compute.manager [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 808.123405] env[69367]: DEBUG nova.network.neutron [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 808.146414] env[69367]: DEBUG nova.network.neutron [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 808.163042] env[69367]: DEBUG nova.compute.manager [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 808.510951] env[69367]: DEBUG nova.network.neutron [req-fa8a26d6-23de-4ac7-9e28-d888f9a4cd20 req-368b2962-403d-4ae0-aece-6976ae0ee53a service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Updated VIF entry in instance network info cache for port adb5a8dc-4297-4171-b593-e1230f1746c7. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 808.511384] env[69367]: DEBUG nova.network.neutron [req-fa8a26d6-23de-4ac7-9e28-d888f9a4cd20 req-368b2962-403d-4ae0-aece-6976ae0ee53a service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Updating instance_info_cache with network_info: [{"id": "00ad3cfd-f282-442d-b152-85e841dd8a16", "address": "fa:16:3e:17:02:f1", "network": {"id": "a8e5c1a6-2526-4042-8a8d-fdb54dbe7bf9", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1429177141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5bc3d470905412ea72a8eedb98e9e47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00ad3cfd-f2", "ovs_interfaceid": "00ad3cfd-f282-442d-b152-85e841dd8a16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "adb5a8dc-4297-4171-b593-e1230f1746c7", "address": "fa:16:3e:54:ac:29", "network": {"id": "a8e5c1a6-2526-4042-8a8d-fdb54dbe7bf9", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1429177141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5bc3d470905412ea72a8eedb98e9e47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadb5a8dc-42", "ovs_interfaceid": "adb5a8dc-4297-4171-b593-e1230f1746c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.560326] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-588f693a-a3d8-4502-b567-4f77033e472f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.568987] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501a4eba-19a9-467d-9a8c-c3d3525ce7a4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.602661] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-fed54ed5-72d1-4b8e-b9df-dc5a6f3ba21a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.613841] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c63f25-ab95-4699-b008-4e0bb71b7ca9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.617970] env[69367]: DEBUG oslo_vmware.api [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234000, 'name': ReconfigVM_Task, 'duration_secs': 0.646971} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.618488] env[69367]: DEBUG oslo_concurrency.lockutils [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Releasing lock "837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 808.618696] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Reconfigured VM to attach interface {{(pid=69367) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 808.634972] env[69367]: DEBUG nova.compute.provider_tree [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 808.649476] env[69367]: DEBUG nova.network.neutron [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.697074] env[69367]: DEBUG oslo_concurrency.lockutils [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.019176] env[69367]: DEBUG oslo_concurrency.lockutils [req-fa8a26d6-23de-4ac7-9e28-d888f9a4cd20 req-368b2962-403d-4ae0-aece-6976ae0ee53a service nova] Releasing lock "refresh_cache-837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 809.124374] env[69367]: DEBUG oslo_concurrency.lockutils [None 
req-85ae9545-2286-457b-9c4a-a112188816da tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "interface-837b4093-308b-440b-940d-fc0227a5c590-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.866s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 809.154309] env[69367]: INFO nova.compute.manager [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: dd598b7a-057f-48ea-a31e-96e7ccadeb3d] Took 1.03 seconds to deallocate network for instance. [ 809.165075] env[69367]: ERROR nova.scheduler.client.report [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] [req-a361e01b-670e-4a5d-8f2f-e51252460ccb] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-a361e01b-670e-4a5d-8f2f-e51252460ccb"}]} [ 809.165500] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.195s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 809.166102] env[69367]: ERROR nova.compute.manager [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] Traceback (most recent call last): [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] yield [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] self.set_inventory_for_provider( [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-a361e01b-670e-4a5d-8f2f-e51252460ccb"}]} [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] During handling of the above exception, another exception occurred: [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] Traceback (most recent call last): [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] with self.rt.instance_claim(context, instance, node, allocs, [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] return f(*args, **kwargs) [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] self._update(elevated, cn) [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: 
ac440ec4-8b1a-465a-a84d-66e8c823836b] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] self._update_to_placement(context, compute_node, startup) [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] return attempt.get(self._wrap_exception) [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] six.reraise(self.value[0], self.value[1], self.value[2]) [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] raise value [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] self.reportclient.update_from_provider_tree( [ 809.166102] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 809.167492] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] with catch_all(pd.uuid): [ 809.167492] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 809.167492] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] self.gen.throw(typ, value, traceback) [ 809.167492] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 809.167492] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] raise exception.ResourceProviderSyncFailed() [ 809.167492] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 809.167492] env[69367]: ERROR nova.compute.manager [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] [ 809.167492] env[69367]: DEBUG nova.compute.utils [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 809.168257] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.785s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.169459] env[69367]: INFO nova.compute.claims [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 809.174058] env[69367]: DEBUG nova.compute.manager [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] Build of instance ac440ec4-8b1a-465a-a84d-66e8c823836b was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 809.174848] env[69367]: DEBUG nova.compute.manager [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 809.177058] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] Acquiring lock "refresh_cache-ac440ec4-8b1a-465a-a84d-66e8c823836b" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.177221] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] Acquired lock "refresh_cache-ac440ec4-8b1a-465a-a84d-66e8c823836b" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 809.177409] env[69367]: DEBUG nova.network.neutron [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 809.337803] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c 
tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Acquiring lock "05aae150-5d86-4210-ae7e-8c63e83cb907" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.338072] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Lock "05aae150-5d86-4210-ae7e-8c63e83cb907" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.706987] env[69367]: DEBUG nova.network.neutron [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 809.786291] env[69367]: DEBUG nova.network.neutron [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.205497] env[69367]: DEBUG nova.scheduler.client.report [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 810.209298] env[69367]: INFO nova.scheduler.client.report [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Deleted allocations for instance dd598b7a-057f-48ea-a31e-96e7ccadeb3d [ 810.231022] env[69367]: DEBUG nova.scheduler.client.report [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 810.231470] env[69367]: DEBUG nova.compute.provider_tree [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 810.245404] env[69367]: DEBUG nova.scheduler.client.report [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 810.281762] env[69367]: DEBUG nova.scheduler.client.report [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 810.288697] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] Releasing lock "refresh_cache-ac440ec4-8b1a-465a-a84d-66e8c823836b" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 810.289720] env[69367]: DEBUG nova.compute.manager [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 810.289720] env[69367]: DEBUG nova.compute.manager [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 810.289720] env[69367]: DEBUG nova.network.neutron [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 810.316118] env[69367]: DEBUG nova.network.neutron [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 810.721159] env[69367]: DEBUG oslo_concurrency.lockutils [None req-191f9260-80f3-498c-9752-3dd1e1bfc50b tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Lock "dd598b7a-057f-48ea-a31e-96e7ccadeb3d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.107s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 810.821295] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1e4f7f8-2d12-4b01-a184-bf18454d5cc3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.825164] env[69367]: DEBUG nova.network.neutron [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.836157] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ac73f2-1f42-4829-afd1-a46acc89daa1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.873102] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc7430d-1415-47c1-a692-6bcc06162383 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.882404] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3efaebe-73dc-4a18-818a-d044dbab3e36 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.898369] env[69367]: DEBUG nova.compute.provider_tree [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 811.142612] env[69367]: DEBUG oslo_concurrency.lockutils [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "interface-837b4093-308b-440b-940d-fc0227a5c590-adb5a8dc-4297-4171-b593-e1230f1746c7" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 811.142898] env[69367]: DEBUG oslo_concurrency.lockutils [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock 
"interface-837b4093-308b-440b-940d-fc0227a5c590-adb5a8dc-4297-4171-b593-e1230f1746c7" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.229636] env[69367]: DEBUG nova.compute.manager [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 811.328182] env[69367]: INFO nova.compute.manager [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] [instance: ac440ec4-8b1a-465a-a84d-66e8c823836b] Took 1.04 seconds to deallocate network for instance. [ 811.427704] env[69367]: ERROR nova.scheduler.client.report [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] [req-2ff31cd2-542b-4279-8b4a-524e33e6e93b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2ff31cd2-542b-4279-8b4a-524e33e6e93b"}]} [ 811.428106] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.260s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.428708] env[69367]: ERROR nova.compute.manager [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] Traceback (most recent call last): [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] yield [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] self.set_inventory_for_provider( [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2ff31cd2-542b-4279-8b4a-524e33e6e93b"}]} [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] During handling of the above exception, another exception occurred: [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] Traceback (most recent call last): [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] with self.rt.instance_claim(context, instance, node, allocs, [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] return f(*args, **kwargs) [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] self._update(elevated, cn) [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: 
b6d326ff-45aa-44b6-b99c-95edca647e2c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] self._update_to_placement(context, compute_node, startup) [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] return attempt.get(self._wrap_exception) [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] six.reraise(self.value[0], self.value[1], self.value[2]) [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] raise value [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] self.reportclient.update_from_provider_tree( [ 811.428708] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 811.430672] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] with catch_all(pd.uuid): [ 811.430672] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 811.430672] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] self.gen.throw(typ, value, traceback) [ 811.430672] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 811.430672] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] raise exception.ResourceProviderSyncFailed() [ 811.430672] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
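The tracebacks pass through retrying.py because _update_to_placement is wrapped in a retry decorator; with the retrying library, an exception the retry predicate does not accept is re-raised on the first attempt rather than retried. A small, self-contained illustration of that decorator pattern (the predicate and argument values are assumptions for the example, not Nova's configuration):

    # Illustration of the retry wrapper visible in the traceback (retrying.py).
    from retrying import retry

    class TransientConflict(Exception):
        pass

    @retry(retry_on_exception=lambda exc: isinstance(exc, TransientConflict),
           stop_max_attempt_number=4,
           wait_fixed=500)          # milliseconds between attempts
    def update_to_placement():
        # A non-retriable failure (like the HTTP 400 above) is re-raised
        # immediately instead of being attempted again.
        raise ValueError("placement rejected the inventory payload")

    try:
        update_to_placement()
    except ValueError as exc:
        print("gave up after one attempt:", exc)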
[ 811.430672] env[69367]: ERROR nova.compute.manager [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] [ 811.430672] env[69367]: DEBUG nova.compute.utils [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 811.431678] env[69367]: DEBUG oslo_concurrency.lockutils [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.744s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.432215] env[69367]: DEBUG nova.objects.instance [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lazy-loading 'resources' on Instance uuid 7f937d89-684b-44f5-9f30-783aeafe99d1 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 811.437287] env[69367]: DEBUG nova.compute.manager [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] Build of instance b6d326ff-45aa-44b6-b99c-95edca647e2c was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 811.437718] env[69367]: DEBUG nova.compute.manager [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 811.437961] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] Acquiring lock "refresh_cache-b6d326ff-45aa-44b6-b99c-95edca647e2c" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.438145] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] Acquired lock "refresh_cache-b6d326ff-45aa-44b6-b99c-95edca647e2c" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 811.438314] env[69367]: DEBUG nova.network.neutron [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 811.646575] env[69367]: DEBUG oslo_concurrency.lockutils [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.646784] env[69367]: DEBUG oslo_concurrency.lockutils [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquired lock "837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 811.647721] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1195fffc-3fa1-4a64-a173-ef541a5a80ee {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.667348] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f057781-d391-41ad-a7ab-445ead8ee503 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.697430] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Reconfiguring VM to detach interface {{(pid=69367) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 811.697907] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23612379-d7c7-42be-9edb-97c734057176 {{(pid=69367) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.720104] env[69367]: DEBUG oslo_vmware.api [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Waiting for the task: (returnval){ [ 811.720104] env[69367]: value = "task-4234001" [ 811.720104] env[69367]: _type = "Task" [ 811.720104] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.729150] env[69367]: DEBUG oslo_vmware.api [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234001, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.751214] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 811.960934] env[69367]: DEBUG nova.scheduler.client.report [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 811.968882] env[69367]: DEBUG nova.network.neutron [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 811.981498] env[69367]: DEBUG nova.scheduler.client.report [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 811.983295] env[69367]: DEBUG nova.compute.provider_tree [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 811.999101] env[69367]: DEBUG nova.scheduler.client.report [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 812.018263] env[69367]: DEBUG nova.scheduler.client.report [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 812.126051] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Acquiring lock "42114002-28e0-408a-862e-547680ed479f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.126051] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Lock "42114002-28e0-408a-862e-547680ed479f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 812.163333] env[69367]: DEBUG nova.network.neutron [None 
req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.231335] env[69367]: DEBUG oslo_vmware.api [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234001, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.364191] env[69367]: INFO nova.scheduler.client.report [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] Deleted allocations for instance ac440ec4-8b1a-465a-a84d-66e8c823836b [ 812.531458] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ff86030-18b2-44fa-91bb-0f05b078467e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.544505] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] Acquiring lock "097b74f5-19a1-41be-968d-19489ea9733c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.544505] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] Lock "097b74f5-19a1-41be-968d-19489ea9733c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 812.547775] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-251696bf-48b9-486d-b63d-a7374f11f2c2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.581414] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32797189-b658-4c79-ae25-600ab6414c82 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.590084] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e2fe6e5-58e0-49c6-b847-3aee75490b5c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.604654] env[69367]: DEBUG nova.compute.provider_tree [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 812.666538] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] Releasing lock "refresh_cache-b6d326ff-45aa-44b6-b99c-95edca647e2c" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 812.666538] env[69367]: DEBUG nova.compute.manager [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 812.666716] env[69367]: DEBUG nova.compute.manager [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 812.666889] env[69367]: DEBUG nova.network.neutron [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 812.699990] env[69367]: DEBUG nova.network.neutron [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 812.734800] env[69367]: DEBUG oslo_vmware.api [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234001, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.874235] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3172fd0f-e37a-4da4-b881-902c58437d42 tempest-ServerMetadataNegativeTestJSON-995045374 tempest-ServerMetadataNegativeTestJSON-995045374-project-member] Lock "ac440ec4-8b1a-465a-a84d-66e8c823836b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.386s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.131800] env[69367]: ERROR nova.scheduler.client.report [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [req-8b0c120c-ba59-4be3-95e3-a3daf1389acc] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-8b0c120c-ba59-4be3-95e3-a3daf1389acc"}]} [ 813.132205] env[69367]: DEBUG oslo_concurrency.lockutils [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.701s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.132839] env[69367]: ERROR nova.compute.manager [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
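The pattern repeats on every claim: _refresh_and_get_inventory pulls the last accepted inventory back from placement (DISK_GB max_unit 1), the compute host then overwrites its local ProviderTree with max_unit 0, and the next PUT fails with the same 400. A plausible source of the zero is a free-space figure that rounds down to 0 GB when converted to whole gigabytes; the sketch below is a hypothetical illustration of that rounding and of a local guard against it, not the actual Nova/VMware driver code:

    # Hypothetical illustration only: an integer-GB conversion that can yield
    # a max_unit of 0, and a guard applying the same bound placement enforces.
    GiB = 1024 ** 3

    def disk_gb_max_unit(largest_free_bytes: int) -> int:
        return largest_free_bytes // GiB          # 900 MiB free -> 0

    def validate_max_unit(value: int) -> int:
        if value < 1:                             # placement's minimum of 1
            raise ValueError(f"max_unit {value} is less than the minimum of 1")
        return value

    print(disk_gb_max_unit(900 * 1024 ** 2))               # 0 -> would trigger the 400
    print(validate_max_unit(disk_gb_max_unit(250 * GiB)))  # 250 -> acceptable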
[ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Traceback (most recent call last): [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] yield [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] self.set_inventory_for_provider( [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-8b0c120c-ba59-4be3-95e3-a3daf1389acc"}]} [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] During handling of the above exception, another exception occurred: [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Traceback (most recent call last): [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] self._delete_instance(context, instance, bdms) [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] self._complete_deletion(context, instance) [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] self._update_resource_tracker(context, instance) [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 
7f937d89-684b-44f5-9f30-783aeafe99d1] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] self.rt.update_usage(context, instance, instance.node) [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] return f(*args, **kwargs) [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] self._update(context.elevated(), self.compute_nodes[nodename]) [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] self._update_to_placement(context, compute_node, startup) [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] return attempt.get(self._wrap_exception) [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] six.reraise(self.value[0], self.value[1], self.value[2]) [ 813.132839] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 813.134038] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] raise value [ 813.134038] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 813.134038] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 813.134038] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 813.134038] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] self.reportclient.update_from_provider_tree( [ 813.134038] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 813.134038] 
env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] with catch_all(pd.uuid): [ 813.134038] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 813.134038] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] self.gen.throw(typ, value, traceback) [ 813.134038] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 813.134038] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] raise exception.ResourceProviderSyncFailed() [ 813.134038] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 813.134038] env[69367]: ERROR nova.compute.manager [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] [ 813.136460] env[69367]: DEBUG oslo_concurrency.lockutils [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.171s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.136696] env[69367]: DEBUG nova.objects.instance [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lazy-loading 'resources' on Instance uuid ab9d8e3e-65c5-4ac9-920f-3042b8cf2054 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 813.203287] env[69367]: DEBUG nova.network.neutron [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.236173] env[69367]: DEBUG oslo_vmware.api [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234001, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.377743] env[69367]: DEBUG nova.compute.manager [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 813.644053] env[69367]: DEBUG oslo_concurrency.lockutils [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "7f937d89-684b-44f5-9f30-783aeafe99d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.444s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.667277] env[69367]: DEBUG nova.scheduler.client.report [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 813.684159] env[69367]: DEBUG nova.scheduler.client.report [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 813.684363] env[69367]: DEBUG nova.compute.provider_tree [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 813.697376] env[69367]: DEBUG nova.scheduler.client.report [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 813.703684] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Acquiring lock "4901e02d-c55c-4c27-8d5a-e48c7e83aaa9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.703910] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Lock 
"4901e02d-c55c-4c27-8d5a-e48c7e83aaa9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.705396] env[69367]: INFO nova.compute.manager [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] [instance: b6d326ff-45aa-44b6-b99c-95edca647e2c] Took 1.04 seconds to deallocate network for instance. [ 813.716782] env[69367]: DEBUG nova.scheduler.client.report [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 813.734544] env[69367]: DEBUG oslo_vmware.api [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234001, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.898130] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 814.241025] env[69367]: DEBUG oslo_vmware.api [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234001, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.278180] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef7c25b7-5f58-41e1-b742-4b601875544d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.287665] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa3e8715-35da-46bb-8fc6-a82e2e63fc3f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.324247] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca0e8579-de69-43e4-a723-92f123535876 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.330553] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761b86f8-e04f-47cb-bf7c-29ba269dfc1a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.346583] env[69367]: DEBUG nova.compute.provider_tree [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 814.736572] env[69367]: DEBUG oslo_vmware.api [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234001, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.744714] env[69367]: INFO nova.scheduler.client.report [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] Deleted allocations for instance b6d326ff-45aa-44b6-b99c-95edca647e2c [ 814.882033] env[69367]: ERROR nova.scheduler.client.report [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [req-c7eb4389-5f8c-443e-9afc-2c5fba19f38d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-c7eb4389-5f8c-443e-9afc-2c5fba19f38d"}]} [ 814.882033] env[69367]: DEBUG oslo_concurrency.lockutils [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.745s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 814.882428] env[69367]: ERROR nova.compute.manager [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Traceback (most recent call last): [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] yield [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] self.set_inventory_for_provider( [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-c7eb4389-5f8c-443e-9afc-2c5fba19f38d"}]} [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: 
ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] During handling of the above exception, another exception occurred: [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Traceback (most recent call last): [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] self._delete_instance(context, instance, bdms) [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] self._complete_deletion(context, instance) [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] self._update_resource_tracker(context, instance) [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] self.rt.update_usage(context, instance, instance.node) [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] return f(*args, **kwargs) [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] self._update(context.elevated(), self.compute_nodes[nodename]) [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] self._update_to_placement(context, compute_node, startup) [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] return attempt.get(self._wrap_exception) [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] six.reraise(self.value[0], self.value[1], self.value[2]) [ 814.882428] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 814.883549] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] raise value [ 814.883549] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 814.883549] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 814.883549] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 814.883549] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] self.reportclient.update_from_provider_tree( [ 814.883549] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 814.883549] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] with catch_all(pd.uuid): [ 814.883549] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 814.883549] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] self.gen.throw(typ, value, traceback) [ 814.883549] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 814.883549] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] raise exception.ResourceProviderSyncFailed() [ 814.883549] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 814.883549] env[69367]: ERROR nova.compute.manager [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] [ 814.889097] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.674s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 814.890646] env[69367]: INFO nova.compute.claims [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: eab70948-bb67-4f56-9f35-65e164fd5990] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 815.160792] env[69367]: DEBUG oslo_concurrency.lockutils [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 815.237177] env[69367]: DEBUG oslo_vmware.api [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234001, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.252910] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5687842f-f711-41da-ac00-7ef2110513fe tempest-ServerRescueTestJSONUnderV235-356240681 tempest-ServerRescueTestJSONUnderV235-356240681-project-member] Lock "b6d326ff-45aa-44b6-b99c-95edca647e2c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.245s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 815.400736] env[69367]: DEBUG oslo_concurrency.lockutils [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "ab9d8e3e-65c5-4ac9-920f-3042b8cf2054" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.125s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 815.737329] env[69367]: DEBUG oslo_vmware.api [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234001, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.755999] env[69367]: DEBUG nova.compute.manager [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 815.933659] env[69367]: DEBUG nova.scheduler.client.report [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 815.949906] env[69367]: DEBUG nova.scheduler.client.report [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 815.950244] env[69367]: DEBUG nova.compute.provider_tree [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 815.967592] env[69367]: DEBUG nova.scheduler.client.report [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 815.993131] env[69367]: DEBUG nova.scheduler.client.report [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 816.241221] env[69367]: DEBUG oslo_vmware.api [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234001, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.280463] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.449214] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6f33256-73d0-4c56-9614-b3ab4723cb7e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.457779] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b97d5b5f-6d46-4b20-9d9c-518ea6ab447e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.498736] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d411740-785f-4793-b20c-fe8d903b1509 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.507987] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d08a1d86-987e-42ba-beaf-8f2e20dadd18 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.530238] env[69367]: DEBUG nova.compute.provider_tree [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 816.738444] env[69367]: DEBUG oslo_vmware.api [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234001, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.919511] env[69367]: DEBUG oslo_concurrency.lockutils [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 817.055016] env[69367]: ERROR nova.scheduler.client.report [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [req-46f970b2-85df-43c8-ae26-0261bc174d4b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-46f970b2-85df-43c8-ae26-0261bc174d4b"}]} [ 817.055482] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.170s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 817.056096] env[69367]: ERROR nova.compute.manager [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: eab70948-bb67-4f56-9f35-65e164fd5990] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] Traceback (most recent call last): [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] yield [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] self.set_inventory_for_provider( [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-46f970b2-85df-43c8-ae26-0261bc174d4b"}]} [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] During handling of the above exception, another exception occurred: [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] Traceback (most recent call last): [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] with self.rt.instance_claim(context, instance, node, allocs, [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] return f(*args, **kwargs) [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] self._update(elevated, cn) [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: 
eab70948-bb67-4f56-9f35-65e164fd5990] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] self._update_to_placement(context, compute_node, startup) [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] return attempt.get(self._wrap_exception) [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] six.reraise(self.value[0], self.value[1], self.value[2]) [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] raise value [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] self.reportclient.update_from_provider_tree( [ 817.056096] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 817.057280] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] with catch_all(pd.uuid): [ 817.057280] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 817.057280] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] self.gen.throw(typ, value, traceback) [ 817.057280] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 817.057280] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] raise exception.ResourceProviderSyncFailed() [ 817.057280] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 817.057280] env[69367]: ERROR nova.compute.manager [instance: eab70948-bb67-4f56-9f35-65e164fd5990] [ 817.057280] env[69367]: DEBUG nova.compute.utils [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: eab70948-bb67-4f56-9f35-65e164fd5990] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 817.057999] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.638s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 817.059548] env[69367]: INFO nova.compute.claims [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] [instance: 484ce161-5686-4573-8eed-4ebb3505e843] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 817.062825] env[69367]: DEBUG nova.compute.manager [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: eab70948-bb67-4f56-9f35-65e164fd5990] Build of instance eab70948-bb67-4f56-9f35-65e164fd5990 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 817.063324] env[69367]: DEBUG nova.compute.manager [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: eab70948-bb67-4f56-9f35-65e164fd5990] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 817.063558] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Acquiring lock "refresh_cache-eab70948-bb67-4f56-9f35-65e164fd5990" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.063708] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Acquired lock "refresh_cache-eab70948-bb67-4f56-9f35-65e164fd5990" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 817.063868] env[69367]: DEBUG nova.network.neutron [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: eab70948-bb67-4f56-9f35-65e164fd5990] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 817.239406] env[69367]: DEBUG oslo_vmware.api [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234001, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.597976] env[69367]: DEBUG nova.network.neutron [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: eab70948-bb67-4f56-9f35-65e164fd5990] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 817.743704] env[69367]: DEBUG nova.network.neutron [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: eab70948-bb67-4f56-9f35-65e164fd5990] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.745228] env[69367]: DEBUG oslo_vmware.api [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234001, 'name': ReconfigVM_Task, 'duration_secs': 5.780555} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.745458] env[69367]: DEBUG oslo_concurrency.lockutils [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Releasing lock "837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 817.745661] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Reconfigured VM to detach interface {{(pid=69367) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 817.810329] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 817.810879] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 817.985435] env[69367]: DEBUG nova.compute.manager [req-4f36b5e2-2518-4f2b-a9fb-cf483d6ca6b3 req-d32dbb52-726d-4f14-b817-77f675129782 service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Received event network-vif-deleted-adb5a8dc-4297-4171-b593-e1230f1746c7 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 817.985553] env[69367]: INFO nova.compute.manager [req-4f36b5e2-2518-4f2b-a9fb-cf483d6ca6b3 req-d32dbb52-726d-4f14-b817-77f675129782 service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Neutron deleted interface adb5a8dc-4297-4171-b593-e1230f1746c7; detaching it from the instance and deleting it from the info cache [ 817.985770] env[69367]: DEBUG nova.network.neutron [req-4f36b5e2-2518-4f2b-a9fb-cf483d6ca6b3 req-d32dbb52-726d-4f14-b817-77f675129782 service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Updating instance_info_cache with network_info: [{"id": 
"00ad3cfd-f282-442d-b152-85e841dd8a16", "address": "fa:16:3e:17:02:f1", "network": {"id": "a8e5c1a6-2526-4042-8a8d-fdb54dbe7bf9", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1429177141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5bc3d470905412ea72a8eedb98e9e47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00ad3cfd-f2", "ovs_interfaceid": "00ad3cfd-f282-442d-b152-85e841dd8a16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.093363] env[69367]: DEBUG nova.scheduler.client.report [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 818.114795] env[69367]: DEBUG nova.scheduler.client.report [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 818.114795] env[69367]: DEBUG nova.compute.provider_tree [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 818.131136] env[69367]: DEBUG nova.scheduler.client.report [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) 
_refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 818.151797] env[69367]: DEBUG nova.scheduler.client.report [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 818.251567] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Releasing lock "refresh_cache-eab70948-bb67-4f56-9f35-65e164fd5990" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 818.251761] env[69367]: DEBUG nova.compute.manager [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 818.251949] env[69367]: DEBUG nova.compute.manager [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: eab70948-bb67-4f56-9f35-65e164fd5990] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 818.252212] env[69367]: DEBUG nova.network.neutron [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: eab70948-bb67-4f56-9f35-65e164fd5990] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 818.270357] env[69367]: DEBUG nova.network.neutron [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: eab70948-bb67-4f56-9f35-65e164fd5990] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 818.323322] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 818.324510] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 818.324510] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 818.324510] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 818.324510] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 818.324510] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 818.324904] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69367) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11183}} [ 818.324904] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 818.489048] env[69367]: DEBUG oslo_concurrency.lockutils [req-4f36b5e2-2518-4f2b-a9fb-cf483d6ca6b3 req-d32dbb52-726d-4f14-b817-77f675129782 service nova] Acquiring lock "837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.489265] env[69367]: DEBUG oslo_concurrency.lockutils [req-4f36b5e2-2518-4f2b-a9fb-cf483d6ca6b3 req-d32dbb52-726d-4f14-b817-77f675129782 service nova] Acquired lock "837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 818.490426] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1840bb7-c970-496b-99c3-707702bdab09 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.513559] env[69367]: DEBUG oslo_concurrency.lockutils [req-4f36b5e2-2518-4f2b-a9fb-cf483d6ca6b3 req-d32dbb52-726d-4f14-b817-77f675129782 service nova] Releasing lock "837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 818.513832] env[69367]: WARNING nova.compute.manager [req-4f36b5e2-2518-4f2b-a9fb-cf483d6ca6b3 req-d32dbb52-726d-4f14-b817-77f675129782 service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Detach interface failed, port_id=adb5a8dc-4297-4171-b593-e1230f1746c7, reason: No device with interface-id adb5a8dc-4297-4171-b593-e1230f1746c7 exists on VM: nova.exception.NotFound: No device with interface-id adb5a8dc-4297-4171-b593-e1230f1746c7 exists on VM [ 818.564022] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e7ccb1-a787-44e8-816c-c8396b92c749 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.571704] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b5e67a1-c33c-4239-a2ad-846b273cb57f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.603599] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c78908-6d0e-450f-a250-e51ee3f5ee49 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.614844] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a0ee110-056d-4fa8-84ca-38c911a2076f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.630859] env[69367]: DEBUG nova.compute.provider_tree [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 
4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 818.772900] env[69367]: DEBUG nova.network.neutron [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: eab70948-bb67-4f56-9f35-65e164fd5990] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.829117] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 818.969021] env[69367]: DEBUG oslo_concurrency.lockutils [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "refresh_cache-837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.969252] env[69367]: DEBUG oslo_concurrency.lockutils [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquired lock "refresh_cache-837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 818.969440] env[69367]: DEBUG nova.network.neutron [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 819.114550] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "837b4093-308b-440b-940d-fc0227a5c590" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.114827] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "837b4093-308b-440b-940d-fc0227a5c590" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.115054] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "837b4093-308b-440b-940d-fc0227a5c590-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.115252] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "837b4093-308b-440b-940d-fc0227a5c590-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.115424] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "837b4093-308b-440b-940d-fc0227a5c590-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.117523] env[69367]: INFO nova.compute.manager [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Terminating instance [ 819.157977] env[69367]: ERROR nova.scheduler.client.report [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] [req-825b44a1-e8ae-4918-8a1e-fcb62908f014] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-825b44a1-e8ae-4918-8a1e-fcb62908f014"}]} [ 819.158381] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.100s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.158952] env[69367]: ERROR nova.compute.manager [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] [instance: 484ce161-5686-4573-8eed-4ebb3505e843] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] Traceback (most recent call last): [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] yield [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] self.set_inventory_for_provider( [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-825b44a1-e8ae-4918-8a1e-fcb62908f014"}]} [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] During handling of the above exception, another exception occurred: [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] Traceback (most recent call last): [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] with self.rt.instance_claim(context, instance, node, allocs, [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] return f(*args, **kwargs) [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] self._update(elevated, cn) [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 
484ce161-5686-4573-8eed-4ebb3505e843] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] self._update_to_placement(context, compute_node, startup) [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] return attempt.get(self._wrap_exception) [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] six.reraise(self.value[0], self.value[1], self.value[2]) [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] raise value [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] self.reportclient.update_from_provider_tree( [ 819.158952] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 819.160332] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] with catch_all(pd.uuid): [ 819.160332] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 819.160332] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] self.gen.throw(typ, value, traceback) [ 819.160332] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 819.160332] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] raise exception.ResourceProviderSyncFailed() [ 819.160332] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 819.160332] env[69367]: ERROR nova.compute.manager [instance: 484ce161-5686-4573-8eed-4ebb3505e843] [ 819.160332] env[69367]: DEBUG nova.compute.utils [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] [instance: 484ce161-5686-4573-8eed-4ebb3505e843] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 819.160791] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.716s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.160984] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.161163] env[69367]: INFO nova.compute.manager [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] [instance: e1c7d100-4ad7-4871-970f-bb7562bfc6fc] Successfully reverted task state from None on failure for instance. [ 819.163716] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.583s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.166360] env[69367]: INFO nova.compute.claims [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: bb59f765-0d86-4803-845c-8186e9341702] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server [None req-6853d6a0-2173-46e7-8542-239f48c4a2c9 tempest-SecurityGroupsTestJSON-322828888 tempest-SecurityGroupsTestJSON-322828888-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server yield [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2cc171e0-67c5-4d1d-bf2a-7c157c7b7f80"}]} [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 819.170173] env[69367]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 
819.171553] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server raise value [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server File 
"/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 819.171553] env[69367]: ERROR oslo_messaging.rpc.server [ 819.171553] env[69367]: DEBUG nova.compute.manager [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] [instance: 484ce161-5686-4573-8eed-4ebb3505e843] Build of instance 484ce161-5686-4573-8eed-4ebb3505e843 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 819.171553] env[69367]: DEBUG nova.compute.manager [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] [instance: 484ce161-5686-4573-8eed-4ebb3505e843] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 819.171553] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] Acquiring lock "refresh_cache-484ce161-5686-4573-8eed-4ebb3505e843" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.172903] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] Acquired lock "refresh_cache-484ce161-5686-4573-8eed-4ebb3505e843" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 819.172903] env[69367]: DEBUG nova.network.neutron [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] [instance: 484ce161-5686-4573-8eed-4ebb3505e843] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 819.275589] env[69367]: INFO nova.compute.manager [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: eab70948-bb67-4f56-9f35-65e164fd5990] Took 1.02 seconds to deallocate network for instance. [ 819.624019] env[69367]: DEBUG nova.compute.manager [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Start destroying the instance on the hypervisor. 
{{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 819.624019] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 819.624019] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4841c79a-2f78-4424-87e8-5dd7eec32580 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.633023] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 819.633023] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c9430610-fe30-4c75-b8d6-db9ca3987505 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.641874] env[69367]: DEBUG oslo_vmware.api [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Waiting for the task: (returnval){ [ 819.641874] env[69367]: value = "task-4234002" [ 819.641874] env[69367]: _type = "Task" [ 819.641874] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.651111] env[69367]: DEBUG oslo_vmware.api [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234002, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.714211] env[69367]: DEBUG nova.network.neutron [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] [instance: 484ce161-5686-4573-8eed-4ebb3505e843] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 819.847725] env[69367]: DEBUG nova.network.neutron [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] [instance: 484ce161-5686-4573-8eed-4ebb3505e843] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.965672] env[69367]: DEBUG nova.network.neutron [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Updating instance_info_cache with network_info: [{"id": "00ad3cfd-f282-442d-b152-85e841dd8a16", "address": "fa:16:3e:17:02:f1", "network": {"id": "a8e5c1a6-2526-4042-8a8d-fdb54dbe7bf9", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1429177141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5bc3d470905412ea72a8eedb98e9e47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00ad3cfd-f2", "ovs_interfaceid": "00ad3cfd-f282-442d-b152-85e841dd8a16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.162323] env[69367]: DEBUG oslo_vmware.api [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234002, 'name': PowerOffVM_Task, 'duration_secs': 0.214639} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.162323] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 820.162323] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 820.162323] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-49f55db0-5956-4297-8413-f53e147ff7d5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.200904] env[69367]: DEBUG nova.scheduler.client.report [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 820.220026] env[69367]: DEBUG nova.scheduler.client.report [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 820.220026] env[69367]: DEBUG nova.compute.provider_tree [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 820.237609] env[69367]: DEBUG nova.scheduler.client.report [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 820.247363] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 
837b4093-308b-440b-940d-fc0227a5c590] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 820.247363] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Deleting contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 820.247363] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Deleting the datastore file [datastore1] 837b4093-308b-440b-940d-fc0227a5c590 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 820.247642] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d291492-3824-456a-960c-328134527a80 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.258012] env[69367]: DEBUG oslo_vmware.api [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Waiting for the task: (returnval){ [ 820.258012] env[69367]: value = "task-4234004" [ 820.258012] env[69367]: _type = "Task" [ 820.258012] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.266173] env[69367]: DEBUG oslo_vmware.api [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234004, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.267373] env[69367]: DEBUG nova.scheduler.client.report [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 820.318289] env[69367]: INFO nova.scheduler.client.report [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Deleted allocations for instance eab70948-bb67-4f56-9f35-65e164fd5990 [ 820.351202] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] Releasing lock "refresh_cache-484ce161-5686-4573-8eed-4ebb3505e843" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 820.351202] env[69367]: DEBUG nova.compute.manager [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 820.351202] env[69367]: DEBUG nova.compute.manager [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] [instance: 484ce161-5686-4573-8eed-4ebb3505e843] Skipping network deallocation for instance since networking was not requested. {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 820.470808] env[69367]: DEBUG oslo_concurrency.lockutils [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Releasing lock "refresh_cache-837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 820.749510] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a39d128-b3fb-42e3-8577-a4ceeef271c9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.760903] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0ed9ce-5468-4b31-b5a2-38b4218ef1e4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.769391] env[69367]: DEBUG oslo_vmware.api [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234004, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157224} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.794294] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 820.794528] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Deleted contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 820.794709] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 820.794883] env[69367]: INFO nova.compute.manager [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Took 1.17 seconds to destroy the instance on the hypervisor. [ 820.796110] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 820.796110] env[69367]: DEBUG nova.compute.manager [-] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 820.796110] env[69367]: DEBUG nova.network.neutron [-] [instance: 837b4093-308b-440b-940d-fc0227a5c590] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 820.797922] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b18b0637-407e-4966-acd7-3c5ea824efba {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.806789] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a836c4d-1f13-4538-8b32-dc0f80f6bd55 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.822457] env[69367]: DEBUG nova.compute.provider_tree [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 820.831741] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2b7dd111-b8dc-4eab-92ec-fa522d44b4d5 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Lock "eab70948-bb67-4f56-9f35-65e164fd5990" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.667s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 820.973590] env[69367]: DEBUG oslo_concurrency.lockutils [None req-cd3b457f-f206-4206-8992-b8b6ee33c044 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "interface-837b4093-308b-440b-940d-fc0227a5c590-adb5a8dc-4297-4171-b593-e1230f1746c7" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.830s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.334631] env[69367]: DEBUG nova.compute.manager [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 821.356279] env[69367]: ERROR nova.scheduler.client.report [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [req-38d64aed-f8c2-4399-a399-9c0adc840415] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-38d64aed-f8c2-4399-a399-9c0adc840415"}]} [ 821.356279] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.192s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.356279] env[69367]: ERROR nova.compute.manager [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: bb59f765-0d86-4803-845c-8186e9341702] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 821.356279] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] Traceback (most recent call last): [ 821.356279] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 821.356279] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] yield [ 821.356279] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 821.356279] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] self.set_inventory_for_provider( [ 821.356279] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 821.356279] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 821.356279] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-38d64aed-f8c2-4399-a399-9c0adc840415"}]} [ 821.356279] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] [ 821.356279] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] During handling of the above exception, another exception occurred: [ 821.356279] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] [ 821.356279] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] Traceback (most recent call last): [ 821.356279] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 821.356279] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] with self.rt.instance_claim(context, instance, node, allocs, [ 821.356279] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 821.356279] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] return f(*args, **kwargs) [ 821.356279] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 821.356279] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] self._update(elevated, cn) [ 821.356279] env[69367]: ERROR nova.compute.manager [instance: 
bb59f765-0d86-4803-845c-8186e9341702] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 821.356279] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] self._update_to_placement(context, compute_node, startup) [ 821.356279] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 821.356279] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 821.357215] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 821.357215] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] return attempt.get(self._wrap_exception) [ 821.357215] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 821.357215] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] six.reraise(self.value[0], self.value[1], self.value[2]) [ 821.357215] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 821.357215] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] raise value [ 821.357215] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 821.357215] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 821.357215] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 821.357215] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] self.reportclient.update_from_provider_tree( [ 821.357215] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 821.357215] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] with catch_all(pd.uuid): [ 821.357215] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 821.357215] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] self.gen.throw(typ, value, traceback) [ 821.357215] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 821.357215] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] raise exception.ResourceProviderSyncFailed() [ 821.357215] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 821.357215] env[69367]: ERROR nova.compute.manager [instance: bb59f765-0d86-4803-845c-8186e9341702] [ 821.357215] env[69367]: DEBUG nova.compute.utils [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: bb59f765-0d86-4803-845c-8186e9341702] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 821.361027] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.051s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.361027] env[69367]: INFO nova.compute.claims [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 821.362913] env[69367]: DEBUG nova.compute.manager [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: bb59f765-0d86-4803-845c-8186e9341702] Build of instance bb59f765-0d86-4803-845c-8186e9341702 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 821.363391] env[69367]: DEBUG nova.compute.manager [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: bb59f765-0d86-4803-845c-8186e9341702] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 821.363611] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "refresh_cache-bb59f765-0d86-4803-845c-8186e9341702" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.363754] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquired lock "refresh_cache-bb59f765-0d86-4803-845c-8186e9341702" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.363908] env[69367]: DEBUG nova.network.neutron [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: bb59f765-0d86-4803-845c-8186e9341702] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 821.404141] env[69367]: INFO nova.scheduler.client.report [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] Deleted allocations for instance 484ce161-5686-4573-8eed-4ebb3505e843 [ 821.561294] env[69367]: DEBUG nova.compute.manager 
[req-e75feb20-4385-4eb3-9105-3724197bf069 req-4d85a939-7138-4c02-93c9-a51aba776c82 service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Received event network-vif-deleted-00ad3cfd-f282-442d-b152-85e841dd8a16 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 821.561599] env[69367]: INFO nova.compute.manager [req-e75feb20-4385-4eb3-9105-3724197bf069 req-4d85a939-7138-4c02-93c9-a51aba776c82 service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Neutron deleted interface 00ad3cfd-f282-442d-b152-85e841dd8a16; detaching it from the instance and deleting it from the info cache [ 821.562370] env[69367]: DEBUG nova.network.neutron [req-e75feb20-4385-4eb3-9105-3724197bf069 req-4d85a939-7138-4c02-93c9-a51aba776c82 service nova] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.856117] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 821.890562] env[69367]: DEBUG nova.network.neutron [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: bb59f765-0d86-4803-845c-8186e9341702] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 821.913531] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6d85fd21-9fe9-4ff3-9e1e-024b8ac24ad1 tempest-ServerShowV254Test-432952848 tempest-ServerShowV254Test-432952848-project-member] Lock "484ce161-5686-4573-8eed-4ebb3505e843" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.709s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.004748] env[69367]: DEBUG nova.network.neutron [-] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.007367] env[69367]: DEBUG nova.network.neutron [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: bb59f765-0d86-4803-845c-8186e9341702] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.066157] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f088c49c-5e80-4bf6-be10-40ab6e70cc1c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.077080] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d853e8-7318-43f8-a88e-b99534de1fde {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.105106] env[69367]: DEBUG nova.compute.manager [req-e75feb20-4385-4eb3-9105-3724197bf069 req-4d85a939-7138-4c02-93c9-a51aba776c82 service nova] [instance: 
837b4093-308b-440b-940d-fc0227a5c590] Detach interface failed, port_id=00ad3cfd-f282-442d-b152-85e841dd8a16, reason: Instance 837b4093-308b-440b-940d-fc0227a5c590 could not be found. {{(pid=69367) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11601}} [ 822.136941] env[69367]: DEBUG oslo_concurrency.lockutils [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Acquiring lock "f8c07fa1-d27c-4d0f-847b-481477cd04bf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 822.137274] env[69367]: DEBUG oslo_concurrency.lockutils [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Lock "f8c07fa1-d27c-4d0f-847b-481477cd04bf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 822.137517] env[69367]: DEBUG oslo_concurrency.lockutils [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Acquiring lock "f8c07fa1-d27c-4d0f-847b-481477cd04bf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 822.137723] env[69367]: DEBUG oslo_concurrency.lockutils [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Lock "f8c07fa1-d27c-4d0f-847b-481477cd04bf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 822.137903] env[69367]: DEBUG oslo_concurrency.lockutils [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Lock "f8c07fa1-d27c-4d0f-847b-481477cd04bf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.140826] env[69367]: INFO nova.compute.manager [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Terminating instance [ 822.393272] env[69367]: DEBUG nova.scheduler.client.report [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 822.416437] env[69367]: DEBUG nova.compute.manager [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 822.425766] env[69367]: DEBUG nova.scheduler.client.report [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 822.425987] env[69367]: DEBUG nova.compute.provider_tree [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 822.447307] env[69367]: DEBUG nova.scheduler.client.report [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 822.471086] env[69367]: DEBUG nova.scheduler.client.report [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 822.513798] env[69367]: INFO nova.compute.manager [-] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Took 1.72 seconds to deallocate network for instance. [ 822.514375] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Releasing lock "refresh_cache-bb59f765-0d86-4803-845c-8186e9341702" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 822.514600] env[69367]: DEBUG nova.compute.manager [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 822.514834] env[69367]: DEBUG nova.compute.manager [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: bb59f765-0d86-4803-845c-8186e9341702] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 822.515041] env[69367]: DEBUG nova.network.neutron [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: bb59f765-0d86-4803-845c-8186e9341702] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 822.548732] env[69367]: DEBUG nova.network.neutron [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: bb59f765-0d86-4803-845c-8186e9341702] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 822.644897] env[69367]: DEBUG nova.compute.manager [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Start destroying the instance on the hypervisor. {{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 822.645962] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 822.647112] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc7b4928-c2b0-446a-9ffa-e3c290ba6de9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.658805] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 822.659248] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-30b3c900-ad1f-4897-ba1e-44ca2f29bb3f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.668302] env[69367]: DEBUG oslo_vmware.api [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Waiting for the task: (returnval){ [ 822.668302] env[69367]: value = "task-4234005" [ 822.668302] env[69367]: _type = "Task" [ 822.668302] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.678198] env[69367]: DEBUG oslo_vmware.api [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4234005, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.937651] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 822.941093] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4bc8e32-8d49-4531-ac9c-9139d7e7cd9b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.948738] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f412419a-9dc3-4d4b-b04d-a1794d77dd34 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.980422] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0467dbad-dd05-497d-92cb-9a9dd70f053d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.988713] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebdc7955-dbf7-4084-82c7-9d6fca732791 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.002842] env[69367]: DEBUG nova.compute.provider_tree [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 823.021718] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] Acquiring lock "2b2a47ca-47d7-43bb-80cd-801e08f327ec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.021973] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] Lock "2b2a47ca-47d7-43bb-80cd-801e08f327ec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.030483] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.055245] env[69367]: DEBUG nova.network.neutron [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: bb59f765-0d86-4803-845c-8186e9341702] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.176800] env[69367]: DEBUG oslo_vmware.api [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4234005, 'name': PowerOffVM_Task, 'duration_secs': 0.220423} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.177089] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 823.177265] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 823.177522] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-28b9b420-d977-44a4-a158-510816ca7eb5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.245836] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 823.246075] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 823.246397] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Deleting the datastore file [datastore2] f8c07fa1-d27c-4d0f-847b-481477cd04bf {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 823.246652] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e27f8e83-9b32-488b-949f-05c02a11a21f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.254137] env[69367]: DEBUG oslo_vmware.api [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Waiting for the task: (returnval){ [ 823.254137] env[69367]: value = 
"task-4234007" [ 823.254137] env[69367]: _type = "Task" [ 823.254137] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.262549] env[69367]: DEBUG oslo_vmware.api [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4234007, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.525864] env[69367]: ERROR nova.scheduler.client.report [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [req-dd66e333-0b64-4fec-ba4f-db3aa05bd672] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-dd66e333-0b64-4fec-ba4f-db3aa05bd672"}]} [ 823.526290] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.168s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 823.526896] env[69367]: ERROR nova.compute.manager [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] Traceback (most recent call last): [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] yield [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] self.set_inventory_for_provider( [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-dd66e333-0b64-4fec-ba4f-db3aa05bd672"}]} [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] During handling of the above exception, another exception occurred: [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] Traceback (most recent call last): [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] with self.rt.instance_claim(context, instance, node, allocs, [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] return f(*args, **kwargs) [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] self._update(elevated, cn) [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 
69d2e230-1c19-4a76-a517-ee7c77854f5c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] self._update_to_placement(context, compute_node, startup) [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] return attempt.get(self._wrap_exception) [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] six.reraise(self.value[0], self.value[1], self.value[2]) [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] raise value [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] self.reportclient.update_from_provider_tree( [ 823.526896] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 823.528093] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] with catch_all(pd.uuid): [ 823.528093] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 823.528093] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] self.gen.throw(typ, value, traceback) [ 823.528093] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 823.528093] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] raise exception.ResourceProviderSyncFailed() [ 823.528093] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
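The HTTP 400 quoted in the traceback above is placement's JSON-Schema validation rejecting the DISK_GB entry: the compute host reported max_unit=0, while the schema fragment quoted in the error requires an integer between 1 and 2147483647. A minimal sketch, assuming the jsonschema package is available (this is not placement's actual validation code), that reproduces the same message:

# Reproduces the validation failure quoted in the 400 response above.
# The schema fragment is copied verbatim from the error text; it is not the
# full placement inventory schema.
from jsonschema import ValidationError, validate

max_unit_schema = {'type': 'integer', 'maximum': 2147483647, 'minimum': 1}
disk_gb = {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0,
           'step_size': 1, 'allocation_ratio': 1.0}

try:
    validate(instance=disk_gb['max_unit'], schema=max_unit_schema)
except ValidationError as exc:
    print(exc.message)  # -> 0 is less than the minimum of 1

The same check is why the refresh at 822.425 is accepted locally (placement still holds DISK_GB max_unit=1) while the tree the compute host tries to push at 823.002 carries max_unit=0, which the PUT reported at 823.525 then rejects.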
[ 823.528093] env[69367]: ERROR nova.compute.manager [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] [ 823.528093] env[69367]: DEBUG nova.compute.utils [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 823.528823] env[69367]: DEBUG oslo_concurrency.lockutils [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.148s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.530482] env[69367]: INFO nova.compute.claims [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 823.533137] env[69367]: DEBUG nova.compute.manager [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] Build of instance 69d2e230-1c19-4a76-a517-ee7c77854f5c was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 823.533558] env[69367]: DEBUG nova.compute.manager [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 823.533786] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquiring lock "refresh_cache-69d2e230-1c19-4a76-a517-ee7c77854f5c" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.533932] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquired lock "refresh_cache-69d2e230-1c19-4a76-a517-ee7c77854f5c" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 823.534102] env[69367]: DEBUG nova.network.neutron [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 823.558876] env[69367]: INFO nova.compute.manager [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: bb59f765-0d86-4803-845c-8186e9341702] Took 
1.04 seconds to deallocate network for instance. [ 823.764661] env[69367]: DEBUG oslo_vmware.api [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Task: {'id': task-4234007, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192932} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.764935] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 823.765177] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 823.765380] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 823.765556] env[69367]: INFO nova.compute.manager [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Took 1.12 seconds to destroy the instance on the hypervisor. [ 823.765796] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 823.765984] env[69367]: DEBUG nova.compute.manager [-] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 823.766150] env[69367]: DEBUG nova.network.neutron [-] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 824.007822] env[69367]: DEBUG nova.compute.manager [req-ac6ad430-bf6b-4e15-b411-aa1438e3811f req-66436411-c935-453f-a7a7-c158e2dc39c2 service nova] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Received event network-vif-deleted-b4b0bb3d-a7c4-42e8-99f2-9078bf5e2b6c {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 824.008112] env[69367]: INFO nova.compute.manager [req-ac6ad430-bf6b-4e15-b411-aa1438e3811f req-66436411-c935-453f-a7a7-c158e2dc39c2 service nova] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Neutron deleted interface b4b0bb3d-a7c4-42e8-99f2-9078bf5e2b6c; detaching it from the instance and deleting it from the info cache [ 824.008246] env[69367]: DEBUG nova.network.neutron [req-ac6ad430-bf6b-4e15-b411-aa1438e3811f req-66436411-c935-453f-a7a7-c158e2dc39c2 service nova] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.054991] env[69367]: DEBUG nova.network.neutron [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 824.147927] env[69367]: DEBUG nova.network.neutron [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.490145] env[69367]: DEBUG nova.network.neutron [-] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.510350] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-af46b237-7c25-4e60-a733-0ae47adf6d37 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.520633] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d48e3d-1a5c-4518-9435-12cb816c831a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.549489] env[69367]: DEBUG nova.compute.manager [req-ac6ad430-bf6b-4e15-b411-aa1438e3811f req-66436411-c935-453f-a7a7-c158e2dc39c2 service nova] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Detach interface failed, port_id=b4b0bb3d-a7c4-42e8-99f2-9078bf5e2b6c, reason: Instance f8c07fa1-d27c-4d0f-847b-481477cd04bf could not be found. 
{{(pid=69367) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11601}} [ 824.566866] env[69367]: DEBUG nova.scheduler.client.report [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 824.584495] env[69367]: DEBUG nova.scheduler.client.report [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 824.584738] env[69367]: DEBUG nova.compute.provider_tree [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 824.594065] env[69367]: INFO nova.scheduler.client.report [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Deleted allocations for instance bb59f765-0d86-4803-845c-8186e9341702 [ 824.601180] env[69367]: DEBUG nova.scheduler.client.report [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 824.622131] env[69367]: DEBUG nova.scheduler.client.report [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 824.653616] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Releasing lock "refresh_cache-69d2e230-1c19-4a76-a517-ee7c77854f5c" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 824.653870] 
env[69367]: DEBUG nova.compute.manager [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 824.654070] env[69367]: DEBUG nova.compute.manager [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 824.654244] env[69367]: DEBUG nova.network.neutron [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 824.682639] env[69367]: DEBUG nova.network.neutron [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 824.994019] env[69367]: INFO nova.compute.manager [-] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Took 1.23 seconds to deallocate network for instance. [ 824.999993] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70457d87-b2f5-47d3-b3a7-33bf43b9c36f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.009234] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee05d3b3-f341-473f-9196-861d6754396d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.046617] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-533ae576-f449-4638-b6c5-ba698dc0ce0e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.055240] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc36818-c247-46b6-be78-a6596b297132 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.070103] env[69367]: DEBUG nova.compute.provider_tree [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 825.108169] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3d051d46-95b5-458b-8d77-42673c6fbe04 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "bb59f765-0d86-4803-845c-8186e9341702" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.261s {{(pid=69367) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 825.185303] env[69367]: DEBUG nova.network.neutron [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.505089] env[69367]: DEBUG oslo_concurrency.lockutils [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 825.574448] env[69367]: DEBUG nova.scheduler.client.report [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 825.611156] env[69367]: DEBUG nova.compute.manager [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 42114002-28e0-408a-862e-547680ed479f] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 825.688402] env[69367]: INFO nova.compute.manager [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 69d2e230-1c19-4a76-a517-ee7c77854f5c] Took 1.03 seconds to deallocate network for instance. [ 826.081317] env[69367]: DEBUG oslo_concurrency.lockutils [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.552s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.081885] env[69367]: DEBUG nova.compute.manager [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Start building networks asynchronously for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 826.084801] env[69367]: DEBUG oslo_concurrency.lockutils [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.648s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.086133] env[69367]: INFO nova.compute.claims [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 826.131666] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 826.231014] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 826.231829] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.591766] env[69367]: DEBUG nova.compute.utils [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 826.594646] env[69367]: DEBUG nova.compute.manager [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Allocating IP information in the background. 
{{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 826.594845] env[69367]: DEBUG nova.network.neutron [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 826.657129] env[69367]: DEBUG nova.policy [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0da2fd26b98d4990b19d51991ad26eda', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68ad9e06b1fb4e5bbad98a14e0c55c60', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 826.725019] env[69367]: INFO nova.scheduler.client.report [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Deleted allocations for instance 69d2e230-1c19-4a76-a517-ee7c77854f5c [ 827.032923] env[69367]: DEBUG nova.network.neutron [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Successfully created port: 7485ac3f-c5a1-4b84-a33a-afb79101e2e9 {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 827.095680] env[69367]: DEBUG nova.compute.manager [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Start building block device mappings for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 827.233827] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fbe32c85-05fd-4b80-8aa6-4b5adf69b91f tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "69d2e230-1c19-4a76-a517-ee7c77854f5c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.278s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.362284] env[69367]: DEBUG nova.network.neutron [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Successfully created port: 19be5951-2642-49af-aaa5-d15bc24f0434 {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 827.504028] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2207fbff-4ea7-43ba-aa03-00e48c5099f8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.511116] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe9c8827-3f03-4c0d-aa75-8b49c6871b2b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.543538] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75a89502-7436-43a9-b0a6-5426cfed5e11 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.555020] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f9f3e73-7a18-4757-8738-1d50767b117a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.567445] env[69367]: DEBUG nova.compute.provider_tree [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 827.608655] env[69367]: DEBUG nova.network.neutron [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Successfully created port: 645e6040-4204-42a3-8600-608066c94ade {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 827.740070] env[69367]: DEBUG nova.compute.manager [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] [instance: 097b74f5-19a1-41be-968d-19489ea9733c] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 828.074119] env[69367]: DEBUG nova.scheduler.client.report [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 828.109249] env[69367]: DEBUG nova.compute.manager [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Start spawning the instance on the hypervisor. {{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 828.135840] env[69367]: DEBUG nova.virt.hardware [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 828.136149] env[69367]: DEBUG nova.virt.hardware [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 828.136400] env[69367]: DEBUG nova.virt.hardware [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 828.136697] env[69367]: DEBUG nova.virt.hardware [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 828.136941] env[69367]: DEBUG nova.virt.hardware [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 828.137223] env[69367]: DEBUG nova.virt.hardware [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 
tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 828.137568] env[69367]: DEBUG nova.virt.hardware [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 828.137789] env[69367]: DEBUG nova.virt.hardware [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 828.137988] env[69367]: DEBUG nova.virt.hardware [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 828.138174] env[69367]: DEBUG nova.virt.hardware [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 828.138353] env[69367]: DEBUG nova.virt.hardware [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 828.139407] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf22572f-bc1a-4d22-a3e0-3063af373863 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.148551] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5441e3bb-15ea-49bb-8b47-083275bc2d49 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.269778] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.360136] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e83ef516-a274-41e3-af27-4c55f99e898b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquiring lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.360422] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e83ef516-a274-41e3-af27-4c55f99e898b tempest-AttachVolumeNegativeTest-2041104029 
tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.579788] env[69367]: DEBUG oslo_concurrency.lockutils [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.495s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 828.580379] env[69367]: DEBUG nova.compute.manager [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Start building networks asynchronously for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 828.583030] env[69367]: DEBUG oslo_concurrency.lockutils [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.886s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.584502] env[69367]: INFO nova.compute.claims [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 828.864333] env[69367]: INFO nova.compute.manager [None req-e83ef516-a274-41e3-af27-4c55f99e898b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Detaching volume 0572f679-bc16-40f7-b698-92ec30d0b913 [ 828.902599] env[69367]: INFO nova.virt.block_device [None req-e83ef516-a274-41e3-af27-4c55f99e898b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Attempting to driver detach volume 0572f679-bc16-40f7-b698-92ec30d0b913 from mountpoint /dev/sdb [ 828.902858] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-e83ef516-a274-41e3-af27-4c55f99e898b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Volume detach. 
Driver type: vmdk {{(pid=69367) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 828.903086] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-e83ef516-a274-41e3-af27-4c55f99e898b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837720', 'volume_id': '0572f679-bc16-40f7-b698-92ec30d0b913', 'name': 'volume-0572f679-bc16-40f7-b698-92ec30d0b913', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd2f8328d-fd05-4e63-9cbd-a6e3ec948964', 'attached_at': '', 'detached_at': '', 'volume_id': '0572f679-bc16-40f7-b698-92ec30d0b913', 'serial': '0572f679-bc16-40f7-b698-92ec30d0b913'} {{(pid=69367) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 828.904091] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2837cfc-682c-49ae-a9a7-d88938bbe695 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.931873] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1178e2f6-945d-4244-94b6-c85ee572ecc9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.940067] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1879f820-a7f9-4764-b7ce-cd18caa3fb9b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.962998] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e4a1c3-6187-44e4-bd2c-ae41f4f0dc49 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.978897] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-e83ef516-a274-41e3-af27-4c55f99e898b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] The volume has not been displaced from its original location: [localhost-esx-install-datastore (1)] volume-0572f679-bc16-40f7-b698-92ec30d0b913/volume-0572f679-bc16-40f7-b698-92ec30d0b913.vmdk. No consolidation needed. 
{{(pid=69367) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 828.984791] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-e83ef516-a274-41e3-af27-4c55f99e898b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Reconfiguring VM instance instance-00000018 to detach disk 2001 {{(pid=69367) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 828.985139] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17cd69aa-418d-436f-b2e7-68b7e9d5bcc6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.005352] env[69367]: DEBUG oslo_vmware.api [None req-e83ef516-a274-41e3-af27-4c55f99e898b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Waiting for the task: (returnval){ [ 829.005352] env[69367]: value = "task-4234008" [ 829.005352] env[69367]: _type = "Task" [ 829.005352] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.014497] env[69367]: DEBUG oslo_vmware.api [None req-e83ef516-a274-41e3-af27-4c55f99e898b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': task-4234008, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.068041] env[69367]: DEBUG nova.compute.manager [req-af0dba19-e872-46f6-80cb-56398f9b9627 req-7a21e802-98c2-43ba-8d79-7457aa0b5b8d service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Received event network-vif-plugged-7485ac3f-c5a1-4b84-a33a-afb79101e2e9 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 829.068309] env[69367]: DEBUG oslo_concurrency.lockutils [req-af0dba19-e872-46f6-80cb-56398f9b9627 req-7a21e802-98c2-43ba-8d79-7457aa0b5b8d service nova] Acquiring lock "48470f96-56d2-4ca2-8078-c5ff4f6db71b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 829.068501] env[69367]: DEBUG oslo_concurrency.lockutils [req-af0dba19-e872-46f6-80cb-56398f9b9627 req-7a21e802-98c2-43ba-8d79-7457aa0b5b8d service nova] Lock "48470f96-56d2-4ca2-8078-c5ff4f6db71b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.068684] env[69367]: DEBUG oslo_concurrency.lockutils [req-af0dba19-e872-46f6-80cb-56398f9b9627 req-7a21e802-98c2-43ba-8d79-7457aa0b5b8d service nova] Lock "48470f96-56d2-4ca2-8078-c5ff4f6db71b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.068852] env[69367]: DEBUG nova.compute.manager [req-af0dba19-e872-46f6-80cb-56398f9b9627 req-7a21e802-98c2-43ba-8d79-7457aa0b5b8d service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] No waiting events found dispatching network-vif-plugged-7485ac3f-c5a1-4b84-a33a-afb79101e2e9 {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 
829.069117] env[69367]: WARNING nova.compute.manager [req-af0dba19-e872-46f6-80cb-56398f9b9627 req-7a21e802-98c2-43ba-8d79-7457aa0b5b8d service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Received unexpected event network-vif-plugged-7485ac3f-c5a1-4b84-a33a-afb79101e2e9 for instance with vm_state building and task_state spawning. [ 829.089564] env[69367]: DEBUG nova.compute.utils [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 829.093215] env[69367]: DEBUG nova.compute.manager [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Allocating IP information in the background. {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 829.093427] env[69367]: DEBUG nova.network.neutron [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 829.150300] env[69367]: DEBUG nova.policy [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e41ee7b20e2445599242708eaa8a3c37', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b226cc896aa743e29a832ad416ec83ad', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 829.276041] env[69367]: DEBUG nova.network.neutron [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Successfully updated port: 7485ac3f-c5a1-4b84-a33a-afb79101e2e9 {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 829.510450] env[69367]: DEBUG nova.network.neutron [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Successfully created port: 78c06e71-f193-4afd-bc2a-6864911de0ff {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 829.517671] env[69367]: DEBUG oslo_vmware.api [None req-e83ef516-a274-41e3-af27-4c55f99e898b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': task-4234008, 'name': ReconfigVM_Task, 'duration_secs': 0.233461} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.517932] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-e83ef516-a274-41e3-af27-4c55f99e898b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Reconfigured VM instance instance-00000018 to detach disk 2001 {{(pid=69367) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 829.523220] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a4f9bc6-b00e-4141-b9df-73fe84661e65 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.538763] env[69367]: DEBUG oslo_vmware.api [None req-e83ef516-a274-41e3-af27-4c55f99e898b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Waiting for the task: (returnval){ [ 829.538763] env[69367]: value = "task-4234009" [ 829.538763] env[69367]: _type = "Task" [ 829.538763] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.547250] env[69367]: DEBUG oslo_vmware.api [None req-e83ef516-a274-41e3-af27-4c55f99e898b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': task-4234009, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.594092] env[69367]: DEBUG nova.compute.manager [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Start building block device mappings for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 829.965866] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65883dde-19a2-4c09-ab4b-e30b8ef73380 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.975321] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8aa2cb5-f521-4f57-ba96-39313b2bfeed {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.008760] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11d7f2cb-a42c-48a1-a1fc-eb52822b2bf1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.017391] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4abaae9-b843-402a-a5dd-502381591820 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.033156] env[69367]: DEBUG nova.compute.provider_tree [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 830.048829] env[69367]: DEBUG oslo_vmware.api [None req-e83ef516-a274-41e3-af27-4c55f99e898b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': task-4234009, 'name': ReconfigVM_Task, 'duration_secs': 0.143983} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.049259] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-e83ef516-a274-41e3-af27-4c55f99e898b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837720', 'volume_id': '0572f679-bc16-40f7-b698-92ec30d0b913', 'name': 'volume-0572f679-bc16-40f7-b698-92ec30d0b913', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd2f8328d-fd05-4e63-9cbd-a6e3ec948964', 'attached_at': '', 'detached_at': '', 'volume_id': '0572f679-bc16-40f7-b698-92ec30d0b913', 'serial': '0572f679-bc16-40f7-b698-92ec30d0b913'} {{(pid=69367) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 830.537055] env[69367]: DEBUG nova.scheduler.client.report [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 830.606516] env[69367]: DEBUG nova.compute.manager [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Start spawning the instance on the hypervisor. 
{{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 830.614017] env[69367]: DEBUG nova.objects.instance [None req-e83ef516-a274-41e3-af27-4c55f99e898b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lazy-loading 'flavor' on Instance uuid d2f8328d-fd05-4e63-9cbd-a6e3ec948964 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 830.633907] env[69367]: DEBUG nova.virt.hardware [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 830.634165] env[69367]: DEBUG nova.virt.hardware [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 830.634323] env[69367]: DEBUG nova.virt.hardware [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 830.634504] env[69367]: DEBUG nova.virt.hardware [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 830.634651] env[69367]: DEBUG nova.virt.hardware [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 830.634798] env[69367]: DEBUG nova.virt.hardware [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 830.635008] env[69367]: DEBUG nova.virt.hardware [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 830.635175] env[69367]: DEBUG nova.virt.hardware [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 
tempest-ImagesTestJSON-957163409-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 830.635341] env[69367]: DEBUG nova.virt.hardware [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 830.635501] env[69367]: DEBUG nova.virt.hardware [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 830.635673] env[69367]: DEBUG nova.virt.hardware [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 830.636759] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba0f0bcb-696b-42e0-b211-b4fcc6174ec3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.645273] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b17ac7-95e7-415e-a4be-27a9c40e3358 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.008642] env[69367]: DEBUG nova.network.neutron [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Successfully updated port: 78c06e71-f193-4afd-bc2a-6864911de0ff {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 831.043071] env[69367]: DEBUG oslo_concurrency.lockutils [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.459s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.043071] env[69367]: DEBUG nova.compute.manager [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Start building networks asynchronously for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 831.045512] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.294s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.048490] env[69367]: INFO nova.compute.claims [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 831.107215] env[69367]: DEBUG nova.compute.manager [req-635499a9-c7dc-41df-8b9c-6f7b270998e1 req-7483fcb3-9be6-49bd-bfaa-904b434de689 service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Received event network-changed-7485ac3f-c5a1-4b84-a33a-afb79101e2e9 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 831.107909] env[69367]: DEBUG nova.compute.manager [req-635499a9-c7dc-41df-8b9c-6f7b270998e1 req-7483fcb3-9be6-49bd-bfaa-904b434de689 service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Refreshing instance network info cache due to event network-changed-7485ac3f-c5a1-4b84-a33a-afb79101e2e9. {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 831.107909] env[69367]: DEBUG oslo_concurrency.lockutils [req-635499a9-c7dc-41df-8b9c-6f7b270998e1 req-7483fcb3-9be6-49bd-bfaa-904b434de689 service nova] Acquiring lock "refresh_cache-48470f96-56d2-4ca2-8078-c5ff4f6db71b" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.108073] env[69367]: DEBUG oslo_concurrency.lockutils [req-635499a9-c7dc-41df-8b9c-6f7b270998e1 req-7483fcb3-9be6-49bd-bfaa-904b434de689 service nova] Acquired lock "refresh_cache-48470f96-56d2-4ca2-8078-c5ff4f6db71b" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 831.108231] env[69367]: DEBUG nova.network.neutron [req-635499a9-c7dc-41df-8b9c-6f7b270998e1 req-7483fcb3-9be6-49bd-bfaa-904b434de689 service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Refreshing network info cache for port 7485ac3f-c5a1-4b84-a33a-afb79101e2e9 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 831.406382] env[69367]: DEBUG nova.network.neutron [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Successfully updated port: 19be5951-2642-49af-aaa5-d15bc24f0434 {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 831.516026] env[69367]: DEBUG oslo_concurrency.lockutils [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquiring lock "refresh_cache-95efcff3-a81b-49fb-b85a-dae060c023b2" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.516026] env[69367]: DEBUG oslo_concurrency.lockutils [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquired lock 
"refresh_cache-95efcff3-a81b-49fb-b85a-dae060c023b2" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 831.516300] env[69367]: DEBUG nova.network.neutron [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 831.553173] env[69367]: DEBUG nova.compute.utils [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 831.557639] env[69367]: DEBUG nova.compute.manager [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Not allocating networking since 'none' was specified. {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 831.621625] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e83ef516-a274-41e3-af27-4c55f99e898b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.261s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.649858] env[69367]: DEBUG nova.network.neutron [req-635499a9-c7dc-41df-8b9c-6f7b270998e1 req-7483fcb3-9be6-49bd-bfaa-904b434de689 service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 831.727653] env[69367]: DEBUG nova.network.neutron [req-635499a9-c7dc-41df-8b9c-6f7b270998e1 req-7483fcb3-9be6-49bd-bfaa-904b434de689 service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.056972] env[69367]: DEBUG nova.compute.manager [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Start building block device mappings for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 832.067172] env[69367]: DEBUG nova.network.neutron [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 832.232905] env[69367]: DEBUG oslo_concurrency.lockutils [req-635499a9-c7dc-41df-8b9c-6f7b270998e1 req-7483fcb3-9be6-49bd-bfaa-904b434de689 service nova] Releasing lock "refresh_cache-48470f96-56d2-4ca2-8078-c5ff4f6db71b" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 832.233196] env[69367]: DEBUG nova.compute.manager [req-635499a9-c7dc-41df-8b9c-6f7b270998e1 req-7483fcb3-9be6-49bd-bfaa-904b434de689 service nova] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Received event network-vif-plugged-78c06e71-f193-4afd-bc2a-6864911de0ff {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 832.233410] env[69367]: DEBUG oslo_concurrency.lockutils [req-635499a9-c7dc-41df-8b9c-6f7b270998e1 req-7483fcb3-9be6-49bd-bfaa-904b434de689 service nova] Acquiring lock "95efcff3-a81b-49fb-b85a-dae060c023b2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.233704] env[69367]: DEBUG oslo_concurrency.lockutils [req-635499a9-c7dc-41df-8b9c-6f7b270998e1 req-7483fcb3-9be6-49bd-bfaa-904b434de689 service nova] Lock "95efcff3-a81b-49fb-b85a-dae060c023b2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.233875] env[69367]: DEBUG oslo_concurrency.lockutils [req-635499a9-c7dc-41df-8b9c-6f7b270998e1 req-7483fcb3-9be6-49bd-bfaa-904b434de689 service nova] Lock "95efcff3-a81b-49fb-b85a-dae060c023b2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.234063] env[69367]: DEBUG nova.compute.manager [req-635499a9-c7dc-41df-8b9c-6f7b270998e1 req-7483fcb3-9be6-49bd-bfaa-904b434de689 service nova] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] No waiting events found dispatching network-vif-plugged-78c06e71-f193-4afd-bc2a-6864911de0ff {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 832.234242] env[69367]: WARNING nova.compute.manager [req-635499a9-c7dc-41df-8b9c-6f7b270998e1 req-7483fcb3-9be6-49bd-bfaa-904b434de689 service nova] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Received unexpected event network-vif-plugged-78c06e71-f193-4afd-bc2a-6864911de0ff for instance with vm_state building and task_state spawning. [ 832.234528] env[69367]: DEBUG nova.compute.manager [req-635499a9-c7dc-41df-8b9c-6f7b270998e1 req-7483fcb3-9be6-49bd-bfaa-904b434de689 service nova] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Received event network-changed-78c06e71-f193-4afd-bc2a-6864911de0ff {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 832.234604] env[69367]: DEBUG nova.compute.manager [req-635499a9-c7dc-41df-8b9c-6f7b270998e1 req-7483fcb3-9be6-49bd-bfaa-904b434de689 service nova] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Refreshing instance network info cache due to event network-changed-78c06e71-f193-4afd-bc2a-6864911de0ff. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 832.234728] env[69367]: DEBUG oslo_concurrency.lockutils [req-635499a9-c7dc-41df-8b9c-6f7b270998e1 req-7483fcb3-9be6-49bd-bfaa-904b434de689 service nova] Acquiring lock "refresh_cache-95efcff3-a81b-49fb-b85a-dae060c023b2" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.350904] env[69367]: DEBUG nova.network.neutron [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Updating instance_info_cache with network_info: [{"id": "78c06e71-f193-4afd-bc2a-6864911de0ff", "address": "fa:16:3e:59:ce:47", "network": {"id": "6b4c8986-defe-42da-a620-bcbca6046b09", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1648091500-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b226cc896aa743e29a832ad416ec83ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16f09e8c-5240-4839-80cc-62ec29700bd2", "external-id": "nsx-vlan-transportzone-720", "segmentation_id": 720, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78c06e71-f1", "ovs_interfaceid": "78c06e71-f193-4afd-bc2a-6864911de0ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.480392] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa55abd-27d0-43fd-b3c7-7befbd4b0b32 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.489239] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb3fac3e-8601-4d66-a895-1ee60d11f336 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.525546] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caaf7f76-fd09-4931-baf4-85b64c20f1ee {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.534571] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5409073-7571-4fb1-b131-0f576094367e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.549703] env[69367]: DEBUG nova.compute.provider_tree [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 832.706175] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a 
tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquiring lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.706477] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.706660] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquiring lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.706842] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.707028] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.709230] env[69367]: INFO nova.compute.manager [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Terminating instance [ 832.856359] env[69367]: DEBUG oslo_concurrency.lockutils [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Releasing lock "refresh_cache-95efcff3-a81b-49fb-b85a-dae060c023b2" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 832.856734] env[69367]: DEBUG nova.compute.manager [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Instance network_info: |[{"id": "78c06e71-f193-4afd-bc2a-6864911de0ff", "address": "fa:16:3e:59:ce:47", "network": {"id": "6b4c8986-defe-42da-a620-bcbca6046b09", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1648091500-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b226cc896aa743e29a832ad416ec83ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16f09e8c-5240-4839-80cc-62ec29700bd2", "external-id": "nsx-vlan-transportzone-720", "segmentation_id": 720, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78c06e71-f1", "ovs_interfaceid": "78c06e71-f193-4afd-bc2a-6864911de0ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 832.857125] env[69367]: DEBUG oslo_concurrency.lockutils [req-635499a9-c7dc-41df-8b9c-6f7b270998e1 req-7483fcb3-9be6-49bd-bfaa-904b434de689 service nova] Acquired lock "refresh_cache-95efcff3-a81b-49fb-b85a-dae060c023b2" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.857308] env[69367]: DEBUG nova.network.neutron [req-635499a9-c7dc-41df-8b9c-6f7b270998e1 req-7483fcb3-9be6-49bd-bfaa-904b434de689 service nova] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Refreshing network info cache for port 78c06e71-f193-4afd-bc2a-6864911de0ff {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 832.858493] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:ce:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '16f09e8c-5240-4839-80cc-62ec29700bd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '78c06e71-f193-4afd-bc2a-6864911de0ff', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 832.866287] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Creating folder: Project (b226cc896aa743e29a832ad416ec83ad). Parent ref: group-v837645. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 832.867296] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-45041866-574a-43dd-8a5a-47fc7f54da3b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.880595] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Created folder: Project (b226cc896aa743e29a832ad416ec83ad) in parent group-v837645. [ 832.880760] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Creating folder: Instances. Parent ref: group-v837728. 
{{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 832.880986] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-53bf5129-2813-4ff7-b856-bc2ada2981be {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.891181] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Created folder: Instances in parent group-v837728. [ 832.891429] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 832.891623] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 832.891832] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-236a9c44-c813-4496-aad8-9b6b36b3a9d2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.911439] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 832.911439] env[69367]: value = "task-4234012" [ 832.911439] env[69367]: _type = "Task" [ 832.911439] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.919574] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234012, 'name': CreateVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.053534] env[69367]: DEBUG nova.scheduler.client.report [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 833.075349] env[69367]: DEBUG nova.compute.manager [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Start spawning the instance on the hypervisor. 
{{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 833.106451] env[69367]: DEBUG nova.virt.hardware [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 833.106750] env[69367]: DEBUG nova.virt.hardware [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 833.106981] env[69367]: DEBUG nova.virt.hardware [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 833.107282] env[69367]: DEBUG nova.virt.hardware [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 833.107455] env[69367]: DEBUG nova.virt.hardware [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 833.107629] env[69367]: DEBUG nova.virt.hardware [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 833.107861] env[69367]: DEBUG nova.virt.hardware [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 833.108057] env[69367]: DEBUG nova.virt.hardware [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 833.108248] env[69367]: DEBUG nova.virt.hardware [None 
req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 833.108429] env[69367]: DEBUG nova.virt.hardware [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 833.108655] env[69367]: DEBUG nova.virt.hardware [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 833.110095] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3693517-aea9-45b8-af45-e19f798a99ab {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.120095] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-497487ea-c7e5-49d0-9ae0-eae6b87cf399 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.136222] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Instance VIF info [] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 833.142373] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Creating folder: Project (d94c4d7dc99540eeb9103d51c12dc802). Parent ref: group-v837645. 
{{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 833.143984] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6c6ac8f0-b748-43cf-9f8b-23d868bfba63 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.147055] env[69367]: DEBUG nova.compute.manager [req-83cc4a09-a7c1-4424-a2ee-c3a050c4a72f req-05d7c18f-b024-498e-91db-1f2044663369 service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Received event network-vif-plugged-19be5951-2642-49af-aaa5-d15bc24f0434 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 833.147286] env[69367]: DEBUG oslo_concurrency.lockutils [req-83cc4a09-a7c1-4424-a2ee-c3a050c4a72f req-05d7c18f-b024-498e-91db-1f2044663369 service nova] Acquiring lock "48470f96-56d2-4ca2-8078-c5ff4f6db71b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 833.147537] env[69367]: DEBUG oslo_concurrency.lockutils [req-83cc4a09-a7c1-4424-a2ee-c3a050c4a72f req-05d7c18f-b024-498e-91db-1f2044663369 service nova] Lock "48470f96-56d2-4ca2-8078-c5ff4f6db71b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 833.147715] env[69367]: DEBUG oslo_concurrency.lockutils [req-83cc4a09-a7c1-4424-a2ee-c3a050c4a72f req-05d7c18f-b024-498e-91db-1f2044663369 service nova] Lock "48470f96-56d2-4ca2-8078-c5ff4f6db71b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 833.147881] env[69367]: DEBUG nova.compute.manager [req-83cc4a09-a7c1-4424-a2ee-c3a050c4a72f req-05d7c18f-b024-498e-91db-1f2044663369 service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] No waiting events found dispatching network-vif-plugged-19be5951-2642-49af-aaa5-d15bc24f0434 {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 833.148057] env[69367]: WARNING nova.compute.manager [req-83cc4a09-a7c1-4424-a2ee-c3a050c4a72f req-05d7c18f-b024-498e-91db-1f2044663369 service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Received unexpected event network-vif-plugged-19be5951-2642-49af-aaa5-d15bc24f0434 for instance with vm_state building and task_state spawning. [ 833.148223] env[69367]: DEBUG nova.compute.manager [req-83cc4a09-a7c1-4424-a2ee-c3a050c4a72f req-05d7c18f-b024-498e-91db-1f2044663369 service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Received event network-changed-19be5951-2642-49af-aaa5-d15bc24f0434 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 833.148378] env[69367]: DEBUG nova.compute.manager [req-83cc4a09-a7c1-4424-a2ee-c3a050c4a72f req-05d7c18f-b024-498e-91db-1f2044663369 service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Refreshing instance network info cache due to event network-changed-19be5951-2642-49af-aaa5-d15bc24f0434. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 833.148820] env[69367]: DEBUG oslo_concurrency.lockutils [req-83cc4a09-a7c1-4424-a2ee-c3a050c4a72f req-05d7c18f-b024-498e-91db-1f2044663369 service nova] Acquiring lock "refresh_cache-48470f96-56d2-4ca2-8078-c5ff4f6db71b" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.148820] env[69367]: DEBUG oslo_concurrency.lockutils [req-83cc4a09-a7c1-4424-a2ee-c3a050c4a72f req-05d7c18f-b024-498e-91db-1f2044663369 service nova] Acquired lock "refresh_cache-48470f96-56d2-4ca2-8078-c5ff4f6db71b" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.148938] env[69367]: DEBUG nova.network.neutron [req-83cc4a09-a7c1-4424-a2ee-c3a050c4a72f req-05d7c18f-b024-498e-91db-1f2044663369 service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Refreshing network info cache for port 19be5951-2642-49af-aaa5-d15bc24f0434 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 833.161360] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Created folder: Project (d94c4d7dc99540eeb9103d51c12dc802) in parent group-v837645. [ 833.161360] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Creating folder: Instances. Parent ref: group-v837731. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 833.161745] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b4f7185b-84a5-4a24-96d1-c24fbc9b5b78 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.174631] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Created folder: Instances in parent group-v837731. [ 833.174631] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 833.174631] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 833.174631] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2bbc6dcb-d282-4fba-83fa-be8113c15cd7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.201529] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 833.201529] env[69367]: value = "task-4234015" [ 833.201529] env[69367]: _type = "Task" [ 833.201529] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.223878] env[69367]: DEBUG nova.compute.manager [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Start destroying the instance on the hypervisor. {{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 833.224177] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 833.226831] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234015, 'name': CreateVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.228091] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59549bcd-5616-4e11-9a9b-494f03de5c7c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.248758] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 833.249279] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c31fcbd1-c584-46a6-8df8-75bd69c3ddae {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.262137] env[69367]: DEBUG oslo_vmware.api [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Waiting for the task: (returnval){ [ 833.262137] env[69367]: value = "task-4234016" [ 833.262137] env[69367]: _type = "Task" [ 833.262137] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.284455] env[69367]: DEBUG oslo_vmware.api [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': task-4234016, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.422313] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234012, 'name': CreateVM_Task, 'duration_secs': 0.345253} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.425159] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 833.426000] env[69367]: DEBUG oslo_concurrency.lockutils [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.426352] env[69367]: DEBUG oslo_concurrency.lockutils [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.426668] env[69367]: DEBUG oslo_concurrency.lockutils [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 833.427318] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-755532a4-7520-4a90-bf3e-bf22e290aaa7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.433390] env[69367]: DEBUG oslo_vmware.api [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Waiting for the task: (returnval){ [ 833.433390] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]5204c88f-7115-523f-0064-fd18e8ef1271" [ 833.433390] env[69367]: _type = "Task" [ 833.433390] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.444773] env[69367]: DEBUG oslo_vmware.api [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5204c88f-7115-523f-0064-fd18e8ef1271, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.558856] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.513s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 833.559440] env[69367]: DEBUG nova.compute.manager [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Start building networks asynchronously for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 833.564751] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.666s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 833.570197] env[69367]: INFO nova.compute.claims [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 833.689717] env[69367]: DEBUG nova.network.neutron [req-83cc4a09-a7c1-4424-a2ee-c3a050c4a72f req-05d7c18f-b024-498e-91db-1f2044663369 service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 833.713947] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234015, 'name': CreateVM_Task, 'duration_secs': 0.30274} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.714365] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 833.714822] env[69367]: DEBUG oslo_concurrency.lockutils [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.768938] env[69367]: DEBUG nova.network.neutron [req-635499a9-c7dc-41df-8b9c-6f7b270998e1 req-7483fcb3-9be6-49bd-bfaa-904b434de689 service nova] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Updated VIF entry in instance network info cache for port 78c06e71-f193-4afd-bc2a-6864911de0ff. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 833.769361] env[69367]: DEBUG nova.network.neutron [req-635499a9-c7dc-41df-8b9c-6f7b270998e1 req-7483fcb3-9be6-49bd-bfaa-904b434de689 service nova] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Updating instance_info_cache with network_info: [{"id": "78c06e71-f193-4afd-bc2a-6864911de0ff", "address": "fa:16:3e:59:ce:47", "network": {"id": "6b4c8986-defe-42da-a620-bcbca6046b09", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1648091500-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b226cc896aa743e29a832ad416ec83ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16f09e8c-5240-4839-80cc-62ec29700bd2", "external-id": "nsx-vlan-transportzone-720", "segmentation_id": 720, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78c06e71-f1", "ovs_interfaceid": "78c06e71-f193-4afd-bc2a-6864911de0ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.775523] env[69367]: DEBUG oslo_vmware.api [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': task-4234016, 'name': PowerOffVM_Task, 'duration_secs': 0.217222} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.775523] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 833.775523] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 833.775523] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9373daf5-1a71-4f7f-abf0-e2db13834b2f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.800834] env[69367]: DEBUG nova.network.neutron [req-83cc4a09-a7c1-4424-a2ee-c3a050c4a72f req-05d7c18f-b024-498e-91db-1f2044663369 service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.844953] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 833.845836] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Deleting contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 833.846226] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Deleting the datastore file [datastore1] d2f8328d-fd05-4e63-9cbd-a6e3ec948964 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 833.847282] env[69367]: DEBUG nova.network.neutron [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Successfully updated port: 645e6040-4204-42a3-8600-608066c94ade {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 833.849562] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2f826584-6ceb-4499-bc47-c9286c349d65 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.856430] env[69367]: DEBUG oslo_vmware.api [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Waiting for the task: (returnval){ [ 833.856430] env[69367]: value = "task-4234018" [ 833.856430] env[69367]: _type = "Task" [ 833.856430] 
env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.868096] env[69367]: DEBUG oslo_vmware.api [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': task-4234018, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.944859] env[69367]: DEBUG oslo_vmware.api [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5204c88f-7115-523f-0064-fd18e8ef1271, 'name': SearchDatastore_Task, 'duration_secs': 0.025441} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.945261] env[69367]: DEBUG oslo_concurrency.lockutils [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.945562] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 833.945897] env[69367]: DEBUG oslo_concurrency.lockutils [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.946048] env[69367]: DEBUG oslo_concurrency.lockutils [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.946291] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 833.946619] env[69367]: DEBUG oslo_concurrency.lockutils [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.946995] env[69367]: DEBUG oslo_concurrency.lockutils [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Acquired external semaphore 
"[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 833.947279] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d8827aa9-8077-4216-8819-6b7799f2a3e4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.949431] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be804468-a06c-4056-94c3-af35e8461f67 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.955412] env[69367]: DEBUG oslo_vmware.api [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Waiting for the task: (returnval){ [ 833.955412] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]523bacbc-ba67-d0e9-72b0-1e646cf7966b" [ 833.955412] env[69367]: _type = "Task" [ 833.955412] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.959404] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 833.959586] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 833.960650] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22cd8dc3-2dd0-4d1c-8343-d61934bac09d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.965543] env[69367]: DEBUG oslo_vmware.api [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]523bacbc-ba67-d0e9-72b0-1e646cf7966b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.968589] env[69367]: DEBUG oslo_vmware.api [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Waiting for the task: (returnval){ [ 833.968589] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52ee3ecd-6e4a-6267-dddf-754c1158c11c" [ 833.968589] env[69367]: _type = "Task" [ 833.968589] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.976214] env[69367]: DEBUG oslo_vmware.api [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52ee3ecd-6e4a-6267-dddf-754c1158c11c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.071518] env[69367]: DEBUG nova.compute.utils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 834.075420] env[69367]: DEBUG nova.compute.manager [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Allocating IP information in the background. {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 834.075646] env[69367]: DEBUG nova.network.neutron [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 834.117776] env[69367]: DEBUG nova.policy [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8b3d86a66b654d4fbcc4c4cab4c0ad75', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cfbdfc3a96db40c8a3e14c797422f08e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 834.275556] env[69367]: DEBUG oslo_concurrency.lockutils [req-635499a9-c7dc-41df-8b9c-6f7b270998e1 req-7483fcb3-9be6-49bd-bfaa-904b434de689 service nova] Releasing lock "refresh_cache-95efcff3-a81b-49fb-b85a-dae060c023b2" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.303594] env[69367]: DEBUG oslo_concurrency.lockutils [req-83cc4a09-a7c1-4424-a2ee-c3a050c4a72f req-05d7c18f-b024-498e-91db-1f2044663369 service nova] Releasing lock "refresh_cache-48470f96-56d2-4ca2-8078-c5ff4f6db71b" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.349608] env[69367]: DEBUG oslo_concurrency.lockutils [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquiring lock "refresh_cache-48470f96-56d2-4ca2-8078-c5ff4f6db71b" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.349911] env[69367]: DEBUG oslo_concurrency.lockutils [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquired lock "refresh_cache-48470f96-56d2-4ca2-8078-c5ff4f6db71b" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.350220] env[69367]: DEBUG nova.network.neutron [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 
48470f96-56d2-4ca2-8078-c5ff4f6db71b] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 834.369494] env[69367]: DEBUG oslo_vmware.api [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Task: {'id': task-4234018, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138254} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.369751] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 834.369857] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Deleted contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 834.370058] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 834.370260] env[69367]: INFO nova.compute.manager [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Took 1.15 seconds to destroy the instance on the hypervisor. [ 834.370614] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 834.370687] env[69367]: DEBUG nova.compute.manager [-] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 834.370789] env[69367]: DEBUG nova.network.neutron [-] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 834.407861] env[69367]: DEBUG nova.network.neutron [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Successfully created port: badee9f4-aebf-4455-81d3-ddbb3adb8072 {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 834.465900] env[69367]: DEBUG oslo_vmware.api [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]523bacbc-ba67-d0e9-72b0-1e646cf7966b, 'name': SearchDatastore_Task, 'duration_secs': 0.0117} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.466277] env[69367]: DEBUG oslo_concurrency.lockutils [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.466424] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 834.466639] env[69367]: DEBUG oslo_concurrency.lockutils [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.478742] env[69367]: DEBUG oslo_vmware.api [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52ee3ecd-6e4a-6267-dddf-754c1158c11c, 'name': SearchDatastore_Task, 'duration_secs': 0.012407} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.479659] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd6ba0a5-0c15-4fde-867b-10d9f9f53256 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.485853] env[69367]: DEBUG oslo_vmware.api [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Waiting for the task: (returnval){ [ 834.485853] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52c0b6ca-1d3d-c4d1-57db-3e69817ecd08" [ 834.485853] env[69367]: _type = "Task" [ 834.485853] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.495280] env[69367]: DEBUG oslo_vmware.api [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52c0b6ca-1d3d-c4d1-57db-3e69817ecd08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.581610] env[69367]: DEBUG nova.compute.manager [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Start building block device mappings for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 834.897973] env[69367]: DEBUG nova.network.neutron [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 835.000598] env[69367]: DEBUG oslo_vmware.api [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52c0b6ca-1d3d-c4d1-57db-3e69817ecd08, 'name': SearchDatastore_Task, 'duration_secs': 0.034661} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.003721] env[69367]: DEBUG oslo_concurrency.lockutils [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 835.007456] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 95efcff3-a81b-49fb-b85a-dae060c023b2/95efcff3-a81b-49fb-b85a-dae060c023b2.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 835.007456] env[69367]: DEBUG oslo_concurrency.lockutils [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 835.007456] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 835.007456] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-729f92b8-bf2e-4070-ba3f-53211fbf502a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.010328] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-30c9d8dc-93c0-4c53-8690-bfa5f42ec45d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.024181] env[69367]: DEBUG oslo_vmware.api [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Waiting for the task: (returnval){ [ 835.024181] env[69367]: value = "task-4234019" [ 835.024181] env[69367]: _type = "Task" [ 835.024181] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.029595] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 835.030279] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 835.036384] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-003035a1-d3e1-4bb8-89d9-bb1e127fe911 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.040103] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49f4623f-ea4e-4aae-b297-52fbb953f830 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.049194] env[69367]: DEBUG oslo_vmware.api [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': task-4234019, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.050645] env[69367]: DEBUG nova.compute.manager [req-6d2dd7c1-49a6-47a7-9771-94b708bb835b req-45f312dc-34ae-4464-8320-18262e12f173 service nova] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Received event network-vif-deleted-5401116f-daf2-4db0-b052-7bd1adb63cc1 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 835.050645] env[69367]: INFO nova.compute.manager [req-6d2dd7c1-49a6-47a7-9771-94b708bb835b req-45f312dc-34ae-4464-8320-18262e12f173 service nova] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Neutron deleted interface 5401116f-daf2-4db0-b052-7bd1adb63cc1; detaching it from the instance and deleting it from the info cache [ 835.050790] env[69367]: DEBUG nova.network.neutron [req-6d2dd7c1-49a6-47a7-9771-94b708bb835b req-45f312dc-34ae-4464-8320-18262e12f173 service nova] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.060761] env[69367]: DEBUG oslo_vmware.api [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Waiting for the task: (returnval){ [ 835.060761] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]525882df-618f-628d-7a0d-196e4d2f1a0d" [ 835.060761] env[69367]: _type = "Task" [ 835.060761] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.063427] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09396de0-b396-498d-84ae-2b22049f67d3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.110626] env[69367]: DEBUG oslo_vmware.api [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]525882df-618f-628d-7a0d-196e4d2f1a0d, 'name': SearchDatastore_Task, 'duration_secs': 0.009849} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.113619] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a6e9b2-3216-4ef4-a152-940728865d66 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.117021] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd91e54b-0838-4f33-be66-8a4203319d02 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.125668] env[69367]: DEBUG oslo_vmware.api [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Waiting for the task: (returnval){ [ 835.125668] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]526e07ef-717a-0c2a-dc52-d0e9767cbc0c" [ 835.125668] env[69367]: _type = "Task" [ 835.125668] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.127242] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ee94c8c-8df8-406a-be6d-305c7f126359 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.151231] env[69367]: DEBUG nova.compute.provider_tree [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 835.152545] env[69367]: DEBUG oslo_vmware.api [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]526e07ef-717a-0c2a-dc52-d0e9767cbc0c, 'name': SearchDatastore_Task, 'duration_secs': 0.009147} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.153173] env[69367]: DEBUG oslo_concurrency.lockutils [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 835.153444] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] c272b0ae-6313-46ab-977c-6de255e77675/c272b0ae-6313-46ab-977c-6de255e77675.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 835.153698] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0a91726c-df35-4911-857d-f702bfdcd667 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.162406] env[69367]: DEBUG oslo_vmware.api [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Waiting for the task: (returnval){ [ 835.162406] env[69367]: value = "task-4234020" [ 835.162406] env[69367]: _type = "Task" [ 835.162406] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.174512] env[69367]: DEBUG oslo_vmware.api [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234020, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.201367] env[69367]: DEBUG nova.compute.manager [req-7b8deba0-c477-47c2-b2ed-2f81f52d503f req-de4ca034-694c-4204-8c22-678c23bc79fd service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Received event network-vif-plugged-645e6040-4204-42a3-8600-608066c94ade {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 835.201609] env[69367]: DEBUG oslo_concurrency.lockutils [req-7b8deba0-c477-47c2-b2ed-2f81f52d503f req-de4ca034-694c-4204-8c22-678c23bc79fd service nova] Acquiring lock "48470f96-56d2-4ca2-8078-c5ff4f6db71b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 835.201823] env[69367]: DEBUG oslo_concurrency.lockutils [req-7b8deba0-c477-47c2-b2ed-2f81f52d503f req-de4ca034-694c-4204-8c22-678c23bc79fd service nova] Lock "48470f96-56d2-4ca2-8078-c5ff4f6db71b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 835.202133] env[69367]: DEBUG oslo_concurrency.lockutils [req-7b8deba0-c477-47c2-b2ed-2f81f52d503f req-de4ca034-694c-4204-8c22-678c23bc79fd service nova] Lock "48470f96-56d2-4ca2-8078-c5ff4f6db71b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 835.202251] env[69367]: DEBUG nova.compute.manager [req-7b8deba0-c477-47c2-b2ed-2f81f52d503f req-de4ca034-694c-4204-8c22-678c23bc79fd service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] No waiting events found dispatching network-vif-plugged-645e6040-4204-42a3-8600-608066c94ade {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 835.202384] env[69367]: WARNING nova.compute.manager [req-7b8deba0-c477-47c2-b2ed-2f81f52d503f req-de4ca034-694c-4204-8c22-678c23bc79fd service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Received unexpected event network-vif-plugged-645e6040-4204-42a3-8600-608066c94ade for instance with vm_state building and task_state spawning. [ 835.202605] env[69367]: DEBUG nova.compute.manager [req-7b8deba0-c477-47c2-b2ed-2f81f52d503f req-de4ca034-694c-4204-8c22-678c23bc79fd service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Received event network-changed-645e6040-4204-42a3-8600-608066c94ade {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 835.202776] env[69367]: DEBUG nova.compute.manager [req-7b8deba0-c477-47c2-b2ed-2f81f52d503f req-de4ca034-694c-4204-8c22-678c23bc79fd service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Refreshing instance network info cache due to event network-changed-645e6040-4204-42a3-8600-608066c94ade. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 835.202958] env[69367]: DEBUG oslo_concurrency.lockutils [req-7b8deba0-c477-47c2-b2ed-2f81f52d503f req-de4ca034-694c-4204-8c22-678c23bc79fd service nova] Acquiring lock "refresh_cache-48470f96-56d2-4ca2-8078-c5ff4f6db71b" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.482602] env[69367]: DEBUG nova.network.neutron [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Updating instance_info_cache with network_info: [{"id": "7485ac3f-c5a1-4b84-a33a-afb79101e2e9", "address": "fa:16:3e:a0:08:3e", "network": {"id": "757a47fe-101f-496c-951e-88755d3ed618", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1957959951", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.111", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68ad9e06b1fb4e5bbad98a14e0c55c60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5291d0-ee0f-4d70-b2ae-ab6879a67b08", "external-id": "nsx-vlan-transportzone-597", "segmentation_id": 597, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7485ac3f-c5", "ovs_interfaceid": "7485ac3f-c5a1-4b84-a33a-afb79101e2e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "19be5951-2642-49af-aaa5-d15bc24f0434", "address": "fa:16:3e:1e:56:8f", "network": {"id": "21e431ac-8ad6-4f28-87c8-85b2890870f8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1989947687", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.51", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "68ad9e06b1fb4e5bbad98a14e0c55c60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2d3bf80-d60a-4b53-a00a-1381de6d4a12", "external-id": "nsx-vlan-transportzone-982", "segmentation_id": 982, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19be5951-26", "ovs_interfaceid": "19be5951-2642-49af-aaa5-d15bc24f0434", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "645e6040-4204-42a3-8600-608066c94ade", "address": "fa:16:3e:dd:a2:ef", "network": {"id": "757a47fe-101f-496c-951e-88755d3ed618", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1957959951", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.52", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, 
"dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68ad9e06b1fb4e5bbad98a14e0c55c60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5291d0-ee0f-4d70-b2ae-ab6879a67b08", "external-id": "nsx-vlan-transportzone-597", "segmentation_id": 597, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap645e6040-42", "ovs_interfaceid": "645e6040-4204-42a3-8600-608066c94ade", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.516922] env[69367]: DEBUG nova.network.neutron [-] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.535454] env[69367]: DEBUG oslo_vmware.api [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': task-4234019, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.511869} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.535789] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 95efcff3-a81b-49fb-b85a-dae060c023b2/95efcff3-a81b-49fb-b85a-dae060c023b2.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 835.536050] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 835.536388] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3acfea4a-89fb-4832-86b4-ee2682a079ea {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.544023] env[69367]: DEBUG oslo_vmware.api [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Waiting for the task: (returnval){ [ 835.544023] env[69367]: value = "task-4234021" [ 835.544023] env[69367]: _type = "Task" [ 835.544023] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.555998] env[69367]: DEBUG oslo_vmware.api [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': task-4234021, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.556294] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-51ca520e-5dd0-40ae-bc01-520b76799310 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.565933] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d383c9-524d-4e5b-9a6a-a4ec05e41b50 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.593690] env[69367]: DEBUG nova.compute.manager [req-6d2dd7c1-49a6-47a7-9771-94b708bb835b req-45f312dc-34ae-4464-8320-18262e12f173 service nova] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Detach interface failed, port_id=5401116f-daf2-4db0-b052-7bd1adb63cc1, reason: Instance d2f8328d-fd05-4e63-9cbd-a6e3ec948964 could not be found. {{(pid=69367) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11601}} [ 835.614956] env[69367]: DEBUG nova.compute.manager [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Start spawning the instance on the hypervisor. {{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 835.644308] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 835.644573] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 835.644732] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 835.644915] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 835.645072] env[69367]: DEBUG nova.virt.hardware 
[None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 835.645225] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 835.645513] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 835.645697] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 835.645863] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 835.646105] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 835.646367] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 835.647323] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f2c0c90-4935-4cea-a619-79a0a4bf95a2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.656332] env[69367]: DEBUG nova.scheduler.client.report [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 835.661510] env[69367]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e044cb-55df-4b27-bdd6-248140f31092 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.686716] env[69367]: DEBUG oslo_vmware.api [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234020, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.986480] env[69367]: DEBUG oslo_concurrency.lockutils [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Releasing lock "refresh_cache-48470f96-56d2-4ca2-8078-c5ff4f6db71b" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 835.991717] env[69367]: DEBUG nova.compute.manager [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Instance network_info: |[{"id": "7485ac3f-c5a1-4b84-a33a-afb79101e2e9", "address": "fa:16:3e:a0:08:3e", "network": {"id": "757a47fe-101f-496c-951e-88755d3ed618", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1957959951", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.111", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68ad9e06b1fb4e5bbad98a14e0c55c60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5291d0-ee0f-4d70-b2ae-ab6879a67b08", "external-id": "nsx-vlan-transportzone-597", "segmentation_id": 597, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7485ac3f-c5", "ovs_interfaceid": "7485ac3f-c5a1-4b84-a33a-afb79101e2e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "19be5951-2642-49af-aaa5-d15bc24f0434", "address": "fa:16:3e:1e:56:8f", "network": {"id": "21e431ac-8ad6-4f28-87c8-85b2890870f8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1989947687", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.51", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "68ad9e06b1fb4e5bbad98a14e0c55c60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2d3bf80-d60a-4b53-a00a-1381de6d4a12", "external-id": "nsx-vlan-transportzone-982", "segmentation_id": 982, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19be5951-26", "ovs_interfaceid": "19be5951-2642-49af-aaa5-d15bc24f0434", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}, {"id": "645e6040-4204-42a3-8600-608066c94ade", "address": "fa:16:3e:dd:a2:ef", "network": {"id": "757a47fe-101f-496c-951e-88755d3ed618", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1957959951", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.52", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68ad9e06b1fb4e5bbad98a14e0c55c60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5291d0-ee0f-4d70-b2ae-ab6879a67b08", "external-id": "nsx-vlan-transportzone-597", "segmentation_id": 597, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap645e6040-42", "ovs_interfaceid": "645e6040-4204-42a3-8600-608066c94ade", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 835.991717] env[69367]: DEBUG oslo_concurrency.lockutils [req-7b8deba0-c477-47c2-b2ed-2f81f52d503f req-de4ca034-694c-4204-8c22-678c23bc79fd service nova] Acquired lock "refresh_cache-48470f96-56d2-4ca2-8078-c5ff4f6db71b" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 835.991717] env[69367]: DEBUG nova.network.neutron [req-7b8deba0-c477-47c2-b2ed-2f81f52d503f req-de4ca034-694c-4204-8c22-678c23bc79fd service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Refreshing network info cache for port 645e6040-4204-42a3-8600-608066c94ade {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 835.992610] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a0:08:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b5291d0-ee0f-4d70-b2ae-ab6879a67b08', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7485ac3f-c5a1-4b84-a33a-afb79101e2e9', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:56:8f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c2d3bf80-d60a-4b53-a00a-1381de6d4a12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '19be5951-2642-49af-aaa5-d15bc24f0434', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:a2:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b5291d0-ee0f-4d70-b2ae-ab6879a67b08', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '645e6040-4204-42a3-8600-608066c94ade', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 836.005907] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Creating folder: Project (68ad9e06b1fb4e5bbad98a14e0c55c60). Parent ref: group-v837645. 
{{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 836.012018] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3c297192-8bf4-425a-9b5f-2179a3b54706 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.019706] env[69367]: INFO nova.compute.manager [-] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Took 1.65 seconds to deallocate network for instance. [ 836.030852] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Created folder: Project (68ad9e06b1fb4e5bbad98a14e0c55c60) in parent group-v837645. [ 836.031299] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Creating folder: Instances. Parent ref: group-v837734. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 836.031919] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5a93af8c-6d9f-4ac7-8954-6f6c8cf39c6f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.046042] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Created folder: Instances in parent group-v837734. [ 836.046042] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 836.046042] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 836.050313] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-66c8213d-36f7-4aa5-a5ca-b84fded0127a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.078336] env[69367]: DEBUG oslo_vmware.api [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': task-4234021, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070928} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.079890] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 836.081192] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 836.081192] env[69367]: value = "task-4234024" [ 836.081192] env[69367]: _type = "Task" [ 836.081192] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.081192] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99fdb779-0a00-4240-90b5-f6e63739cb30 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.112807] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] 95efcff3-a81b-49fb-b85a-dae060c023b2/95efcff3-a81b-49fb-b85a-dae060c023b2.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 836.116799] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35a4b915-a4cd-4cc1-9748-18cdfdf8868b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.132674] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234024, 'name': CreateVM_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.138581] env[69367]: DEBUG oslo_vmware.api [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Waiting for the task: (returnval){ [ 836.138581] env[69367]: value = "task-4234025" [ 836.138581] env[69367]: _type = "Task" [ 836.138581] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.152718] env[69367]: DEBUG oslo_vmware.api [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': task-4234025, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.160649] env[69367]: DEBUG nova.network.neutron [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Successfully updated port: badee9f4-aebf-4455-81d3-ddbb3adb8072 {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 836.168413] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.604s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.168413] env[69367]: DEBUG nova.compute.manager [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Start building networks asynchronously for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 836.173985] env[69367]: DEBUG oslo_concurrency.lockutils [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.013s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 836.174223] env[69367]: DEBUG oslo_concurrency.lockutils [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.174539] env[69367]: INFO nova.compute.manager [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: 7f937d89-684b-44f5-9f30-783aeafe99d1] Successfully reverted task state from None on failure for instance. [ 836.177164] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.897s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 836.178710] env[69367]: INFO nova.compute.claims [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server [None req-29fe1fd3-03fb-4277-958e-0ff9f92cf48e tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server yield [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-8b0c120c-ba59-4be3-95e3-a3daf1389acc"}]} [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 836.186400] env[69367]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 
836.188117] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server raise value [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server File 
"/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 836.188117] env[69367]: ERROR oslo_messaging.rpc.server [ 836.197108] env[69367]: DEBUG oslo_vmware.api [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234020, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.803566} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.197550] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] c272b0ae-6313-46ab-977c-6de255e77675/c272b0ae-6313-46ab-977c-6de255e77675.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 836.197930] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 836.198931] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f9a2cd3a-1012-4e2b-b5e8-2ec566eb9878 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.208758] env[69367]: DEBUG oslo_vmware.api [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Waiting for the task: (returnval){ [ 836.208758] env[69367]: value = "task-4234026" [ 836.208758] env[69367]: _type = "Task" [ 836.208758] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.221929] env[69367]: DEBUG oslo_vmware.api [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234026, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.368685] env[69367]: DEBUG nova.network.neutron [req-7b8deba0-c477-47c2-b2ed-2f81f52d503f req-de4ca034-694c-4204-8c22-678c23bc79fd service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Updated VIF entry in instance network info cache for port 645e6040-4204-42a3-8600-608066c94ade. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 836.369243] env[69367]: DEBUG nova.network.neutron [req-7b8deba0-c477-47c2-b2ed-2f81f52d503f req-de4ca034-694c-4204-8c22-678c23bc79fd service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Updating instance_info_cache with network_info: [{"id": "7485ac3f-c5a1-4b84-a33a-afb79101e2e9", "address": "fa:16:3e:a0:08:3e", "network": {"id": "757a47fe-101f-496c-951e-88755d3ed618", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1957959951", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.111", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68ad9e06b1fb4e5bbad98a14e0c55c60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5291d0-ee0f-4d70-b2ae-ab6879a67b08", "external-id": "nsx-vlan-transportzone-597", "segmentation_id": 597, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7485ac3f-c5", "ovs_interfaceid": "7485ac3f-c5a1-4b84-a33a-afb79101e2e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "19be5951-2642-49af-aaa5-d15bc24f0434", "address": "fa:16:3e:1e:56:8f", "network": {"id": "21e431ac-8ad6-4f28-87c8-85b2890870f8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1989947687", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.51", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "68ad9e06b1fb4e5bbad98a14e0c55c60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2d3bf80-d60a-4b53-a00a-1381de6d4a12", "external-id": "nsx-vlan-transportzone-982", "segmentation_id": 982, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19be5951-26", "ovs_interfaceid": "19be5951-2642-49af-aaa5-d15bc24f0434", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "645e6040-4204-42a3-8600-608066c94ade", "address": "fa:16:3e:dd:a2:ef", "network": {"id": "757a47fe-101f-496c-951e-88755d3ed618", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1957959951", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.52", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68ad9e06b1fb4e5bbad98a14e0c55c60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5291d0-ee0f-4d70-b2ae-ab6879a67b08", "external-id": "nsx-vlan-transportzone-597", 
"segmentation_id": 597, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap645e6040-42", "ovs_interfaceid": "645e6040-4204-42a3-8600-608066c94ade", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.533364] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 836.596712] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234024, 'name': CreateVM_Task, 'duration_secs': 0.515946} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.596895] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 836.597940] env[69367]: DEBUG oslo_concurrency.lockutils [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.598151] env[69367]: DEBUG oslo_concurrency.lockutils [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 836.598491] env[69367]: DEBUG oslo_concurrency.lockutils [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 836.598751] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e72b976f-9255-4c09-a604-a4c558c38a34 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.603681] env[69367]: DEBUG oslo_vmware.api [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Waiting for the task: (returnval){ [ 836.603681] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]527f820f-3bc7-6334-b864-f3044cc0e985" [ 836.603681] env[69367]: _type = "Task" [ 836.603681] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.612136] env[69367]: DEBUG oslo_vmware.api [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]527f820f-3bc7-6334-b864-f3044cc0e985, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.649040] env[69367]: DEBUG oslo_vmware.api [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': task-4234025, 'name': ReconfigVM_Task, 'duration_secs': 0.313252} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.649319] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Reconfigured VM instance instance-00000049 to attach disk [datastore2] 95efcff3-a81b-49fb-b85a-dae060c023b2/95efcff3-a81b-49fb-b85a-dae060c023b2.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 836.649988] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-37c132cf-6274-42e4-90ca-4edc90a7cfe6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.658211] env[69367]: DEBUG oslo_vmware.api [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Waiting for the task: (returnval){ [ 836.658211] env[69367]: value = "task-4234027" [ 836.658211] env[69367]: _type = "Task" [ 836.658211] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.663159] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "refresh_cache-4a46d003-f57e-4089-aa60-757a4246f071" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.663335] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquired lock "refresh_cache-4a46d003-f57e-4089-aa60-757a4246f071" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 836.663512] env[69367]: DEBUG nova.network.neutron [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 836.670206] env[69367]: DEBUG oslo_vmware.api [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': task-4234027, 'name': Rename_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.689290] env[69367]: DEBUG nova.compute.utils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 836.693218] env[69367]: DEBUG nova.compute.manager [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Allocating IP information in the background. {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 836.693481] env[69367]: DEBUG nova.network.neutron [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 836.719602] env[69367]: DEBUG oslo_vmware.api [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234026, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083287} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.719893] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 836.720713] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d0b8e04-7a5f-410c-88b7-8bdd08378c76 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.742489] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] c272b0ae-6313-46ab-977c-6de255e77675/c272b0ae-6313-46ab-977c-6de255e77675.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 836.744299] env[69367]: DEBUG nova.policy [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8b3d86a66b654d4fbcc4c4cab4c0ad75', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cfbdfc3a96db40c8a3e14c797422f08e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 836.746070] env[69367]: DEBUG oslo_vmware.service [-] 
Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dfd6a6fe-3b00-41d5-b7d2-2c9c2601505a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.767130] env[69367]: DEBUG oslo_vmware.api [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Waiting for the task: (returnval){ [ 836.767130] env[69367]: value = "task-4234028" [ 836.767130] env[69367]: _type = "Task" [ 836.767130] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.776737] env[69367]: DEBUG oslo_vmware.api [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234028, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.872689] env[69367]: DEBUG oslo_concurrency.lockutils [req-7b8deba0-c477-47c2-b2ed-2f81f52d503f req-de4ca034-694c-4204-8c22-678c23bc79fd service nova] Releasing lock "refresh_cache-48470f96-56d2-4ca2-8078-c5ff4f6db71b" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 837.071871] env[69367]: DEBUG nova.network.neutron [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Successfully created port: 4fc784f5-80ec-41ce-bb71-af0e71d38e84 {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 837.117127] env[69367]: DEBUG oslo_vmware.api [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]527f820f-3bc7-6334-b864-f3044cc0e985, 'name': SearchDatastore_Task, 'duration_secs': 0.009649} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.117127] env[69367]: DEBUG oslo_concurrency.lockutils [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 837.118042] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 837.121800] env[69367]: DEBUG oslo_concurrency.lockutils [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.121800] env[69367]: DEBUG oslo_concurrency.lockutils [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 837.121800] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 837.121800] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb62902d-fdbb-461a-b1fb-b2fc4660d0a6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.129986] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 837.130214] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 837.130944] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3ca5a53-89a4-41bc-8bb2-4ed7a2c4d161 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.137760] env[69367]: DEBUG oslo_vmware.api [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Waiting for the task: (returnval){ [ 837.137760] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52885cf7-344c-068e-c26e-d5f60c3d07f8" [ 837.137760] env[69367]: _type = "Task" [ 837.137760] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.147288] env[69367]: DEBUG oslo_vmware.api [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52885cf7-344c-068e-c26e-d5f60c3d07f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.168663] env[69367]: DEBUG oslo_vmware.api [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': task-4234027, 'name': Rename_Task, 'duration_secs': 0.155411} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.168996] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 837.169275] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fde2137d-6a85-49de-9369-4cc2fd55e05f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.178630] env[69367]: DEBUG oslo_vmware.api [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Waiting for the task: (returnval){ [ 837.178630] env[69367]: value = "task-4234029" [ 837.178630] env[69367]: _type = "Task" [ 837.178630] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.188222] env[69367]: DEBUG oslo_vmware.api [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': task-4234029, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.197395] env[69367]: DEBUG nova.compute.manager [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Start building block device mappings for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 837.244913] env[69367]: DEBUG nova.compute.manager [req-c17b20af-37e2-4f42-9cb1-7a89d6c135ac req-2a277af3-2d06-4d39-9097-a4c8845642bc service nova] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Received event network-vif-plugged-badee9f4-aebf-4455-81d3-ddbb3adb8072 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 837.244913] env[69367]: DEBUG oslo_concurrency.lockutils [req-c17b20af-37e2-4f42-9cb1-7a89d6c135ac req-2a277af3-2d06-4d39-9097-a4c8845642bc service nova] Acquiring lock "4a46d003-f57e-4089-aa60-757a4246f071-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 837.244913] env[69367]: DEBUG oslo_concurrency.lockutils [req-c17b20af-37e2-4f42-9cb1-7a89d6c135ac req-2a277af3-2d06-4d39-9097-a4c8845642bc service nova] Lock "4a46d003-f57e-4089-aa60-757a4246f071-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 837.245133] env[69367]: DEBUG oslo_concurrency.lockutils [req-c17b20af-37e2-4f42-9cb1-7a89d6c135ac req-2a277af3-2d06-4d39-9097-a4c8845642bc service nova] Lock "4a46d003-f57e-4089-aa60-757a4246f071-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.245171] env[69367]: DEBUG nova.compute.manager [req-c17b20af-37e2-4f42-9cb1-7a89d6c135ac req-2a277af3-2d06-4d39-9097-a4c8845642bc service nova] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] No waiting events found dispatching network-vif-plugged-badee9f4-aebf-4455-81d3-ddbb3adb8072 {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 837.248020] env[69367]: WARNING nova.compute.manager [req-c17b20af-37e2-4f42-9cb1-7a89d6c135ac req-2a277af3-2d06-4d39-9097-a4c8845642bc service nova] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Received unexpected event network-vif-plugged-badee9f4-aebf-4455-81d3-ddbb3adb8072 for instance with vm_state building and task_state spawning. [ 837.248020] env[69367]: DEBUG nova.compute.manager [req-c17b20af-37e2-4f42-9cb1-7a89d6c135ac req-2a277af3-2d06-4d39-9097-a4c8845642bc service nova] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Received event network-changed-badee9f4-aebf-4455-81d3-ddbb3adb8072 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 837.248020] env[69367]: DEBUG nova.compute.manager [req-c17b20af-37e2-4f42-9cb1-7a89d6c135ac req-2a277af3-2d06-4d39-9097-a4c8845642bc service nova] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Refreshing instance network info cache due to event network-changed-badee9f4-aebf-4455-81d3-ddbb3adb8072. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 837.248020] env[69367]: DEBUG oslo_concurrency.lockutils [req-c17b20af-37e2-4f42-9cb1-7a89d6c135ac req-2a277af3-2d06-4d39-9097-a4c8845642bc service nova] Acquiring lock "refresh_cache-4a46d003-f57e-4089-aa60-757a4246f071" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.248020] env[69367]: DEBUG nova.network.neutron [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 837.285135] env[69367]: DEBUG oslo_vmware.api [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234028, 'name': ReconfigVM_Task, 'duration_secs': 0.270491} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.286260] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Reconfigured VM instance instance-0000004a to attach disk [datastore2] c272b0ae-6313-46ab-977c-6de255e77675/c272b0ae-6313-46ab-977c-6de255e77675.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 837.288676] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-644f9ca9-c1ae-4cb9-9b0a-5b2da7c33980 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.297043] env[69367]: DEBUG oslo_vmware.api [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Waiting for the task: (returnval){ [ 837.297043] env[69367]: value = "task-4234030" [ 837.297043] env[69367]: _type = "Task" [ 837.297043] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.309952] env[69367]: DEBUG oslo_vmware.api [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234030, 'name': Rename_Task} progress is 6%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.641144] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a2c468f-402e-4c7e-b1c9-38a7e487b7ac {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.662797] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e29b75b2-9f06-4b09-ae9f-6a350b820cea {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.668241] env[69367]: DEBUG oslo_vmware.api [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52885cf7-344c-068e-c26e-d5f60c3d07f8, 'name': SearchDatastore_Task, 'duration_secs': 0.038934} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.668591] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9f97eab-94e6-4892-ad17-6c585fe039bd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.704116] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1683a6e-3738-4fdd-bd22-0b915202b160 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.712196] env[69367]: DEBUG oslo_vmware.api [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Waiting for the task: (returnval){ [ 837.712196] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52523e21-5309-dc05-71d3-515f6c054ea1" [ 837.712196] env[69367]: _type = "Task" [ 837.712196] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.723467] env[69367]: DEBUG oslo_vmware.api [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': task-4234029, 'name': PowerOnVM_Task, 'duration_secs': 0.505448} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.724161] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 837.724423] env[69367]: INFO nova.compute.manager [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Took 7.12 seconds to spawn the instance on the hypervisor. 
[ 837.725028] env[69367]: DEBUG nova.compute.manager [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 837.726301] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70993d2c-9185-4826-8d5d-048464427a38 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.734407] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d85ae80-2669-40bd-93c9-7e22be948aa3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.736947] env[69367]: DEBUG oslo_vmware.api [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52523e21-5309-dc05-71d3-515f6c054ea1, 'name': SearchDatastore_Task, 'duration_secs': 0.010121} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.737640] env[69367]: DEBUG oslo_concurrency.lockutils [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 837.737936] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 48470f96-56d2-4ca2-8078-c5ff4f6db71b/48470f96-56d2-4ca2-8078-c5ff4f6db71b.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 837.739406] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8ef1157e-c74c-43fb-8696-e46dfecb254c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.756025] env[69367]: DEBUG nova.compute.provider_tree [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 837.761034] env[69367]: DEBUG oslo_vmware.api [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Waiting for the task: (returnval){ [ 837.761034] env[69367]: value = "task-4234031" [ 837.761034] env[69367]: _type = "Task" [ 837.761034] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.771340] env[69367]: DEBUG oslo_vmware.api [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234031, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.786285] env[69367]: DEBUG nova.network.neutron [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Updating instance_info_cache with network_info: [{"id": "badee9f4-aebf-4455-81d3-ddbb3adb8072", "address": "fa:16:3e:c6:b1:86", "network": {"id": "55efdf13-76a4-4190-9a6e-e890b3e8cc39", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1839781723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfbdfc3a96db40c8a3e14c797422f08e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4", "external-id": "nsx-vlan-transportzone-545", "segmentation_id": 545, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbadee9f4-ae", "ovs_interfaceid": "badee9f4-aebf-4455-81d3-ddbb3adb8072", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.808343] env[69367]: DEBUG oslo_vmware.api [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234030, 'name': Rename_Task, 'duration_secs': 0.159658} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.808650] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 837.808959] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7a1734d4-5394-4da8-97d3-487ba2c72f88 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.816974] env[69367]: DEBUG oslo_vmware.api [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Waiting for the task: (returnval){ [ 837.816974] env[69367]: value = "task-4234032" [ 837.816974] env[69367]: _type = "Task" [ 837.816974] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.828186] env[69367]: DEBUG oslo_vmware.api [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234032, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.215474] env[69367]: DEBUG nova.compute.manager [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Start spawning the instance on the hypervisor. {{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 838.252113] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 838.252444] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 838.252756] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 838.253033] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 838.253269] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 838.253397] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 838.253659] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 838.253835] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 838.254074] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 838.254270] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 838.254513] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 838.259893] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17fbfef1-1035-4a66-9dec-3281af9a9620 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.265867] env[69367]: DEBUG nova.scheduler.client.report [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 838.269610] env[69367]: INFO nova.compute.manager [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Took 30.85 seconds to build instance. 
[ 838.282986] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93152e7b-0aad-450c-a943-18fe694eca21 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.286930] env[69367]: DEBUG oslo_vmware.api [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234031, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.288473] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Releasing lock "refresh_cache-4a46d003-f57e-4089-aa60-757a4246f071" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 838.288918] env[69367]: DEBUG nova.compute.manager [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Instance network_info: |[{"id": "badee9f4-aebf-4455-81d3-ddbb3adb8072", "address": "fa:16:3e:c6:b1:86", "network": {"id": "55efdf13-76a4-4190-9a6e-e890b3e8cc39", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1839781723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfbdfc3a96db40c8a3e14c797422f08e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4", "external-id": "nsx-vlan-transportzone-545", "segmentation_id": 545, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbadee9f4-ae", "ovs_interfaceid": "badee9f4-aebf-4455-81d3-ddbb3adb8072", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 838.290405] env[69367]: DEBUG oslo_concurrency.lockutils [req-c17b20af-37e2-4f42-9cb1-7a89d6c135ac req-2a277af3-2d06-4d39-9097-a4c8845642bc service nova] Acquired lock "refresh_cache-4a46d003-f57e-4089-aa60-757a4246f071" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 838.290631] env[69367]: DEBUG nova.network.neutron [req-c17b20af-37e2-4f42-9cb1-7a89d6c135ac req-2a277af3-2d06-4d39-9097-a4c8845642bc service nova] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Refreshing network info cache for port badee9f4-aebf-4455-81d3-ddbb3adb8072 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 838.291936] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:b1:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'badee9f4-aebf-4455-81d3-ddbb3adb8072', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 838.299520] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Creating folder: Project (cfbdfc3a96db40c8a3e14c797422f08e). Parent ref: group-v837645. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 838.311102] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5bff500b-c3cc-48d0-8ee4-5d03073f6c5d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.322915] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Created folder: Project (cfbdfc3a96db40c8a3e14c797422f08e) in parent group-v837645. [ 838.323085] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Creating folder: Instances. Parent ref: group-v837737. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 838.323642] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4cef9bf9-b484-4689-9e79-a10cb363844a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.329017] env[69367]: DEBUG oslo_vmware.api [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234032, 'name': PowerOnVM_Task} progress is 87%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.337537] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Created folder: Instances in parent group-v837737. [ 838.337775] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 838.337972] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 838.338200] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be0121c6-3338-447b-8b3b-cba95f63f2f8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.358519] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 838.358519] env[69367]: value = "task-4234035" [ 838.358519] env[69367]: _type = "Task" [ 838.358519] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.369308] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234035, 'name': CreateVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.583021] env[69367]: DEBUG nova.network.neutron [req-c17b20af-37e2-4f42-9cb1-7a89d6c135ac req-2a277af3-2d06-4d39-9097-a4c8845642bc service nova] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Updated VIF entry in instance network info cache for port badee9f4-aebf-4455-81d3-ddbb3adb8072. {{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 838.583021] env[69367]: DEBUG nova.network.neutron [req-c17b20af-37e2-4f42-9cb1-7a89d6c135ac req-2a277af3-2d06-4d39-9097-a4c8845642bc service nova] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Updating instance_info_cache with network_info: [{"id": "badee9f4-aebf-4455-81d3-ddbb3adb8072", "address": "fa:16:3e:c6:b1:86", "network": {"id": "55efdf13-76a4-4190-9a6e-e890b3e8cc39", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1839781723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfbdfc3a96db40c8a3e14c797422f08e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4", "external-id": "nsx-vlan-transportzone-545", "segmentation_id": 545, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbadee9f4-ae", "ovs_interfaceid": "badee9f4-aebf-4455-81d3-ddbb3adb8072", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.775714] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.599s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.776335] env[69367]: DEBUG nova.compute.manager [None 
req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Start building networks asynchronously for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 838.779177] env[69367]: DEBUG oslo_concurrency.lockutils [None req-eab4171b-3d9a-4448-8ae3-e7d8a431472b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "95efcff3-a81b-49fb-b85a-dae060c023b2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.498s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.783158] env[69367]: DEBUG oslo_concurrency.lockutils [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.864s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.783505] env[69367]: DEBUG oslo_concurrency.lockutils [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.783582] env[69367]: INFO nova.compute.manager [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] [instance: ab9d8e3e-65c5-4ac9-920f-3042b8cf2054] Successfully reverted task state from None on failure for instance. 
[ 838.786052] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 19.959s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.786242] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.786404] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69367) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 838.786773] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.931s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.788434] env[69367]: INFO nova.compute.claims [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 838.793869] env[69367]: DEBUG oslo_vmware.api [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234031, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534078} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server [None req-732e6b23-494d-4673-a2d3-4e5758dc78a7 tempest-MultipleCreateTestJSON-1024910139 tempest-MultipleCreateTestJSON-1024910139-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server yield [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-c7eb4389-5f8c-443e-9afc-2c5fba19f38d"}]} [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 838.793869] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 
838.800916] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server raise value [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server File 
"/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 838.800916] env[69367]: ERROR oslo_messaging.rpc.server [ 838.800916] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0916065c-3d91-46ca-9e15-814b111901a8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.800916] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 48470f96-56d2-4ca2-8078-c5ff4f6db71b/48470f96-56d2-4ca2-8078-c5ff4f6db71b.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 838.808531] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 838.808531] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6a229c15-37ae-4a4a-903f-c2ac9dbec6bf {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.812709] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf94b68b-c3a6-4247-9545-3e99b60dd407 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.821444] env[69367]: DEBUG oslo_vmware.api [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Waiting for the task: (returnval){ [ 838.821444] env[69367]: value = "task-4234036" [ 838.821444] env[69367]: _type = "Task" [ 838.821444] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.849589] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b60551b-b2c1-4966-acc0-ced0b8871cb0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.861324] env[69367]: DEBUG oslo_vmware.api [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234036, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.861661] env[69367]: DEBUG oslo_vmware.api [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234032, 'name': PowerOnVM_Task, 'duration_secs': 1.005506} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.868095] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 838.868395] env[69367]: INFO nova.compute.manager [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Took 5.79 seconds to spawn the instance on the hypervisor. [ 838.868631] env[69367]: DEBUG nova.compute.manager [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 838.872290] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359f099a-8637-45bd-a796-f6a916b0b2b7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.877891] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88f2416-452c-496c-886c-76ffbc15203e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.886563] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234035, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.924792] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180027MB free_disk=1GB free_vcpus=48 pci_devices=None {{(pid=69367) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 838.924971] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.088864] env[69367]: DEBUG oslo_concurrency.lockutils [req-c17b20af-37e2-4f42-9cb1-7a89d6c135ac req-2a277af3-2d06-4d39-9097-a4c8845642bc service nova] Releasing lock "refresh_cache-4a46d003-f57e-4089-aa60-757a4246f071" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 839.108312] env[69367]: DEBUG nova.network.neutron [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Successfully updated port: 4fc784f5-80ec-41ce-bb71-af0e71d38e84 {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 839.269937] env[69367]: DEBUG nova.compute.manager [req-d3f72b45-9843-4453-ae4d-1b6dcc9f5332 req-927c21d6-8931-4000-8452-ad69873472e2 service nova] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Received event network-vif-plugged-4fc784f5-80ec-41ce-bb71-af0e71d38e84 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 839.270224] env[69367]: DEBUG oslo_concurrency.lockutils [req-d3f72b45-9843-4453-ae4d-1b6dcc9f5332 req-927c21d6-8931-4000-8452-ad69873472e2 service nova] Acquiring lock "46b6bc45-57f0-4850-9249-6bbb22b162c6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.270642] env[69367]: DEBUG oslo_concurrency.lockutils [req-d3f72b45-9843-4453-ae4d-1b6dcc9f5332 req-927c21d6-8931-4000-8452-ad69873472e2 service nova] Lock "46b6bc45-57f0-4850-9249-6bbb22b162c6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 839.270642] env[69367]: DEBUG oslo_concurrency.lockutils [req-d3f72b45-9843-4453-ae4d-1b6dcc9f5332 req-927c21d6-8931-4000-8452-ad69873472e2 service nova] Lock "46b6bc45-57f0-4850-9249-6bbb22b162c6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.270843] env[69367]: DEBUG nova.compute.manager [req-d3f72b45-9843-4453-ae4d-1b6dcc9f5332 req-927c21d6-8931-4000-8452-ad69873472e2 service nova] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] No waiting events found dispatching network-vif-plugged-4fc784f5-80ec-41ce-bb71-af0e71d38e84 {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 839.271127] env[69367]: WARNING nova.compute.manager 
[req-d3f72b45-9843-4453-ae4d-1b6dcc9f5332 req-927c21d6-8931-4000-8452-ad69873472e2 service nova] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Received unexpected event network-vif-plugged-4fc784f5-80ec-41ce-bb71-af0e71d38e84 for instance with vm_state building and task_state spawning. [ 839.271393] env[69367]: DEBUG nova.compute.manager [req-d3f72b45-9843-4453-ae4d-1b6dcc9f5332 req-927c21d6-8931-4000-8452-ad69873472e2 service nova] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Received event network-changed-4fc784f5-80ec-41ce-bb71-af0e71d38e84 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 839.271616] env[69367]: DEBUG nova.compute.manager [req-d3f72b45-9843-4453-ae4d-1b6dcc9f5332 req-927c21d6-8931-4000-8452-ad69873472e2 service nova] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Refreshing instance network info cache due to event network-changed-4fc784f5-80ec-41ce-bb71-af0e71d38e84. {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 839.271849] env[69367]: DEBUG oslo_concurrency.lockutils [req-d3f72b45-9843-4453-ae4d-1b6dcc9f5332 req-927c21d6-8931-4000-8452-ad69873472e2 service nova] Acquiring lock "refresh_cache-46b6bc45-57f0-4850-9249-6bbb22b162c6" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.272099] env[69367]: DEBUG oslo_concurrency.lockutils [req-d3f72b45-9843-4453-ae4d-1b6dcc9f5332 req-927c21d6-8931-4000-8452-ad69873472e2 service nova] Acquired lock "refresh_cache-46b6bc45-57f0-4850-9249-6bbb22b162c6" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 839.272291] env[69367]: DEBUG nova.network.neutron [req-d3f72b45-9843-4453-ae4d-1b6dcc9f5332 req-927c21d6-8931-4000-8452-ad69873472e2 service nova] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Refreshing network info cache for port 4fc784f5-80ec-41ce-bb71-af0e71d38e84 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 839.296610] env[69367]: DEBUG nova.compute.utils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 839.298227] env[69367]: DEBUG nova.compute.manager [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Allocating IP information in the background. {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 839.298508] env[69367]: DEBUG nova.network.neutron [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 839.301246] env[69367]: DEBUG nova.compute.manager [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 839.332689] env[69367]: DEBUG oslo_vmware.api [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234036, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078374} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.332983] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 839.333827] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d6ad94a-58a2-47b7-a7bd-f011ac3d9553 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.364777] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] 48470f96-56d2-4ca2-8078-c5ff4f6db71b/48470f96-56d2-4ca2-8078-c5ff4f6db71b.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 839.366766] env[69367]: DEBUG nova.policy [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8b3d86a66b654d4fbcc4c4cab4c0ad75', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cfbdfc3a96db40c8a3e14c797422f08e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 839.368755] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cedc6213-ce15-40ec-ad30-49f5c3c759c8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.394896] env[69367]: DEBUG oslo_vmware.api [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Waiting for the task: (returnval){ [ 839.394896] env[69367]: value = "task-4234037" [ 839.394896] env[69367]: _type = "Task" [ 839.394896] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.401089] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234035, 'name': CreateVM_Task, 'duration_secs': 0.570935} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.403787] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 839.403991] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.404197] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 839.404554] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 839.405545] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-760a7d3f-5f4a-48ed-9581-9d7c79bc86a2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.417043] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 839.417043] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52131309-aadc-d397-1259-3d13b9b52115" [ 839.417043] env[69367]: _type = "Task" [ 839.417043] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.423370] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52131309-aadc-d397-1259-3d13b9b52115, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.436445] env[69367]: INFO nova.compute.manager [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Took 30.76 seconds to build instance. 
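The completed vSphere tasks in the records above each report a 'duration_secs' figure (for example ExtendVirtualDisk_Task at 0.078 s and CreateVM_Task at 0.571 s), and several instances are being built concurrently, so those timings end up scattered across interleaved request IDs. Below is a minimal, standard-library-only sketch of how such timings could be pulled out of a log like this for a quick per-task summary; it assumes only the record layout visible above, and the names COMPLETED and summarize are illustrative helpers, not part of Nova or oslo.vmware.

    import re
    import sys
    from collections import defaultdict

    # Matches the completed-task records seen in this log, e.g.:
    #   Task: {'id': task-4234036, 'name': ExtendVirtualDisk_Task,
    #          'duration_secs': 0.078374} completed successfully.
    COMPLETED = re.compile(
        r"Task: \{'id': (?P<task_id>[^,]+), 'name': (?P<name>[^,]+),"
        r" 'duration_secs': (?P<secs>[\d.]+)\} completed successfully\."
    )

    def summarize(lines):
        """Group completed vSphere task durations by task name."""
        durations = defaultdict(list)
        for line in lines:
            # A physical line may carry several records, so scan all matches.
            for match in COMPLETED.finditer(line):
                durations[match.group("name")].append(float(match.group("secs")))
        return {
            name: (len(vals), max(vals), sum(vals) / len(vals))
            for name, vals in durations.items()
        }

    if __name__ == "__main__":
        for name, (count, worst, avg) in sorted(summarize(sys.stdin).items()):
            print(f"{name}: count={count} max={worst:.3f}s avg={avg:.3f}s")

Intermediate polling records ("progress is N%.") are ignored by the pattern; only the final "completed successfully" record for each task contributes to the summary.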
[ 839.595778] env[69367]: DEBUG nova.compute.manager [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 839.596745] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cdbb369-2f8f-464d-abdf-97e660e4c647 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.611631] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "refresh_cache-46b6bc45-57f0-4850-9249-6bbb22b162c6" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.750948] env[69367]: DEBUG nova.network.neutron [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Successfully created port: 2217ec6c-a6da-4c26-b9de-53239e598080 {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 839.807025] env[69367]: DEBUG nova.compute.manager [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Start building block device mappings for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 839.835608] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.837842] env[69367]: DEBUG nova.network.neutron [req-d3f72b45-9843-4453-ae4d-1b6dcc9f5332 req-927c21d6-8931-4000-8452-ad69873472e2 service nova] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 839.922261] env[69367]: DEBUG oslo_vmware.api [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234037, 'name': ReconfigVM_Task, 'duration_secs': 0.31037} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.929572] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Reconfigured VM instance instance-00000048 to attach disk [datastore2] 48470f96-56d2-4ca2-8078-c5ff4f6db71b/48470f96-56d2-4ca2-8078-c5ff4f6db71b.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 839.933264] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a6a17101-6145-4abd-ac7f-13d261fbb855 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.938193] env[69367]: DEBUG oslo_concurrency.lockutils [None req-44b524e7-7df1-4bb0-9d49-19b159dcb8ba tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Lock "c272b0ae-6313-46ab-977c-6de255e77675" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.690s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.949461] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52131309-aadc-d397-1259-3d13b9b52115, 'name': SearchDatastore_Task, 'duration_secs': 0.010003} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.955023] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 839.956040] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 839.956040] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.956040] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 839.956219] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 839.956722] env[69367]: DEBUG oslo_vmware.api [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Waiting for the task: (returnval){ [ 839.956722] env[69367]: value = "task-4234038" [ 839.956722] env[69367]: _type = "Task" [ 839.956722] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.957203] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dae56e69-1933-4c6c-89bf-ec5e75d21338 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.971268] env[69367]: DEBUG oslo_vmware.api [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234038, 'name': Rename_Task} progress is 14%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.974475] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 839.974663] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 839.976562] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7721f779-496f-4262-8451-a5998012b743 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.983026] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 839.983026] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52742d6a-9c5c-1605-6652-01e5249f5791" [ 839.983026] env[69367]: _type = "Task" [ 839.983026] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.997483] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52742d6a-9c5c-1605-6652-01e5249f5791, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.053637] env[69367]: DEBUG nova.network.neutron [req-d3f72b45-9843-4453-ae4d-1b6dcc9f5332 req-927c21d6-8931-4000-8452-ad69873472e2 service nova] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.136062] env[69367]: INFO nova.compute.manager [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] instance snapshotting [ 840.136062] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07cbdfe2-0db5-4247-a6c2-9e8643026edc {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.175269] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd811306-59ba-4e51-bc3c-d9058459f920 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.368951] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9278a41-4187-4903-8274-744dc5acd960 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.379299] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4ff4e76-8062-4d1b-8c7a-23c28ca4ad5e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.412842] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b478d5-ac14-460c-8f1a-20cdf84e07d7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.421414] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d0dac8-4434-44b9-a1ef-f7203ec3c134 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.437278] env[69367]: DEBUG nova.compute.provider_tree [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 840.455377] env[69367]: DEBUG nova.compute.manager [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 840.470647] env[69367]: DEBUG oslo_vmware.api [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234038, 'name': Rename_Task} progress is 99%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.493153] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52742d6a-9c5c-1605-6652-01e5249f5791, 'name': SearchDatastore_Task, 'duration_secs': 0.015914} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.494580] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7039ee21-2610-4eda-874b-aab0f01855e8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.500842] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 840.500842] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52fa0fe1-2b23-d9ba-e3ca-cbe63d855908" [ 840.500842] env[69367]: _type = "Task" [ 840.500842] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.509211] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52fa0fe1-2b23-d9ba-e3ca-cbe63d855908, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.534034] env[69367]: INFO nova.compute.manager [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Rebuilding instance [ 840.557051] env[69367]: DEBUG oslo_concurrency.lockutils [req-d3f72b45-9843-4453-ae4d-1b6dcc9f5332 req-927c21d6-8931-4000-8452-ad69873472e2 service nova] Releasing lock "refresh_cache-46b6bc45-57f0-4850-9249-6bbb22b162c6" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 840.557469] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquired lock "refresh_cache-46b6bc45-57f0-4850-9249-6bbb22b162c6" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.557658] env[69367]: DEBUG nova.network.neutron [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 840.576350] env[69367]: DEBUG nova.compute.manager [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Checking state {{(pid=69367) _get_power_state 
/opt/stack/nova/nova/compute/manager.py:1797}} [ 840.577205] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc5b0bb-f97b-4264-8ce3-82b6217d5e5f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.689981] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Creating Snapshot of the VM instance {{(pid=69367) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 840.690238] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f13862e2-b91a-44a3-9ba4-3f331b6bf39d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.699103] env[69367]: DEBUG oslo_vmware.api [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Waiting for the task: (returnval){ [ 840.699103] env[69367]: value = "task-4234039" [ 840.699103] env[69367]: _type = "Task" [ 840.699103] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.708873] env[69367]: DEBUG oslo_vmware.api [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': task-4234039, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.823179] env[69367]: DEBUG nova.compute.manager [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Start spawning the instance on the hypervisor. 
{{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 840.848016] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 840.848016] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 840.848016] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 840.848016] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 840.848016] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 840.849044] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 840.849044] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 840.849044] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 840.849044] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 840.849394] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 840.849656] env[69367]: DEBUG nova.virt.hardware [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 840.850651] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549e1b5c-c015-406c-9b11-2e0b467da990 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.859522] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cfb6017-d45f-4e19-998f-140e81b5ef41 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.940750] env[69367]: DEBUG nova.scheduler.client.report [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 840.977247] env[69367]: DEBUG oslo_vmware.api [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234038, 'name': Rename_Task} progress is 99%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.984407] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.012024] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52fa0fe1-2b23-d9ba-e3ca-cbe63d855908, 'name': SearchDatastore_Task, 'duration_secs': 0.010604} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.012457] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 841.012766] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 4a46d003-f57e-4089-aa60-757a4246f071/4a46d003-f57e-4089-aa60-757a4246f071.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 841.013122] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-991c73e0-9205-4f30-9f5b-cbd77cb72237 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.021782] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 841.021782] env[69367]: value = "task-4234040" [ 841.021782] env[69367]: _type = "Task" [ 841.021782] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.030620] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234040, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.102716] env[69367]: DEBUG nova.network.neutron [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 841.209499] env[69367]: DEBUG oslo_vmware.api [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': task-4234039, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.354277] env[69367]: DEBUG nova.network.neutron [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Updating instance_info_cache with network_info: [{"id": "4fc784f5-80ec-41ce-bb71-af0e71d38e84", "address": "fa:16:3e:16:21:21", "network": {"id": "55efdf13-76a4-4190-9a6e-e890b3e8cc39", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1839781723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfbdfc3a96db40c8a3e14c797422f08e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4", "external-id": "nsx-vlan-transportzone-545", "segmentation_id": 545, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fc784f5-80", "ovs_interfaceid": "4fc784f5-80ec-41ce-bb71-af0e71d38e84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.445800] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.659s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 841.446352] env[69367]: DEBUG nova.compute.manager [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Start building networks asynchronously for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 841.449138] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.512s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 841.450756] env[69367]: INFO nova.compute.claims [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 841.477446] env[69367]: DEBUG oslo_vmware.api [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234038, 'name': Rename_Task, 'duration_secs': 1.208099} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.477838] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 841.478219] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7970f93c-b40c-4f51-808a-c4d8e77cbfd7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.488268] env[69367]: DEBUG oslo_vmware.api [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Waiting for the task: (returnval){ [ 841.488268] env[69367]: value = "task-4234041" [ 841.488268] env[69367]: _type = "Task" [ 841.488268] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.509358] env[69367]: DEBUG oslo_vmware.api [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234041, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.514300] env[69367]: DEBUG nova.compute.manager [req-fbef0358-7bc6-46f0-bc42-8900d0ddb3a9 req-b4c53bd5-4068-4700-877d-c2589b309269 service nova] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Received event network-vif-plugged-2217ec6c-a6da-4c26-b9de-53239e598080 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 841.514300] env[69367]: DEBUG oslo_concurrency.lockutils [req-fbef0358-7bc6-46f0-bc42-8900d0ddb3a9 req-b4c53bd5-4068-4700-877d-c2589b309269 service nova] Acquiring lock "54a1f586-481d-427e-ba0b-be90e5573bd3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.514300] env[69367]: DEBUG oslo_concurrency.lockutils [req-fbef0358-7bc6-46f0-bc42-8900d0ddb3a9 req-b4c53bd5-4068-4700-877d-c2589b309269 service nova] Lock "54a1f586-481d-427e-ba0b-be90e5573bd3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 841.514300] env[69367]: DEBUG oslo_concurrency.lockutils [req-fbef0358-7bc6-46f0-bc42-8900d0ddb3a9 req-b4c53bd5-4068-4700-877d-c2589b309269 service nova] Lock "54a1f586-481d-427e-ba0b-be90e5573bd3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 841.514420] env[69367]: DEBUG nova.compute.manager [req-fbef0358-7bc6-46f0-bc42-8900d0ddb3a9 req-b4c53bd5-4068-4700-877d-c2589b309269 service nova] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] No waiting events found dispatching network-vif-plugged-2217ec6c-a6da-4c26-b9de-53239e598080 {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 841.514654] env[69367]: WARNING nova.compute.manager 
[req-fbef0358-7bc6-46f0-bc42-8900d0ddb3a9 req-b4c53bd5-4068-4700-877d-c2589b309269 service nova] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Received unexpected event network-vif-plugged-2217ec6c-a6da-4c26-b9de-53239e598080 for instance with vm_state building and task_state spawning. [ 841.537033] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234040, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.595030] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 841.595030] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b5f16dd4-e664-4a63-be96-db6cbdcee16d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.600388] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Waiting for the task: (returnval){ [ 841.600388] env[69367]: value = "task-4234042" [ 841.600388] env[69367]: _type = "Task" [ 841.600388] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.609451] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234042, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.615590] env[69367]: DEBUG nova.network.neutron [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Successfully updated port: 2217ec6c-a6da-4c26-b9de-53239e598080 {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 841.710495] env[69367]: DEBUG oslo_vmware.api [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': task-4234039, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.857693] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Releasing lock "refresh_cache-46b6bc45-57f0-4850-9249-6bbb22b162c6" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 841.858093] env[69367]: DEBUG nova.compute.manager [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Instance network_info: |[{"id": "4fc784f5-80ec-41ce-bb71-af0e71d38e84", "address": "fa:16:3e:16:21:21", "network": {"id": "55efdf13-76a4-4190-9a6e-e890b3e8cc39", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1839781723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfbdfc3a96db40c8a3e14c797422f08e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4", "external-id": "nsx-vlan-transportzone-545", "segmentation_id": 545, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fc784f5-80", "ovs_interfaceid": "4fc784f5-80ec-41ce-bb71-af0e71d38e84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 841.858583] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:21:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4fc784f5-80ec-41ce-bb71-af0e71d38e84', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 841.866862] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 841.867069] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 841.867285] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e62cf6ce-31f1-46d8-a0f4-f14e026976f1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.889928] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 841.889928] env[69367]: value = "task-4234043" [ 841.889928] env[69367]: _type = "Task" [ 841.889928] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.898872] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234043, 'name': CreateVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.957035] env[69367]: DEBUG nova.compute.utils [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 841.961126] env[69367]: DEBUG nova.compute.manager [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Allocating IP information in the background. {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 841.961395] env[69367]: DEBUG nova.network.neutron [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 841.999711] env[69367]: DEBUG oslo_vmware.api [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234041, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.014544] env[69367]: DEBUG nova.policy [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e5b8da7ebbbb4f1e9f75c3c47e91ec9e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '294b002e61984ba1a746b51c95e2af4c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 842.033631] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234040, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.557925} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.034221] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 4a46d003-f57e-4089-aa60-757a4246f071/4a46d003-f57e-4089-aa60-757a4246f071.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 842.034221] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 842.034449] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3f0721a5-48c5-4490-8d42-2e8c7cc1fa13 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.043368] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 842.043368] env[69367]: value = "task-4234044" [ 842.043368] env[69367]: _type = "Task" [ 842.043368] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.052831] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234044, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.113868] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234042, 'name': PowerOffVM_Task, 'duration_secs': 0.153083} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.114176] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 842.114938] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 842.115768] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-572d0bc4-bf78-45c4-8d36-886d3bbcaf79 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.118823] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "refresh_cache-54a1f586-481d-427e-ba0b-be90e5573bd3" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.119033] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquired lock "refresh_cache-54a1f586-481d-427e-ba0b-be90e5573bd3" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.119218] env[69367]: DEBUG nova.network.neutron [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 842.126368] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 842.126368] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-623d2311-a82d-4725-bd5b-0f283961fd82 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.161961] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Unregistered the VM {{(pid=69367) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 842.162229] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 842.162518] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Deleting the datastore file [datastore2] c272b0ae-6313-46ab-977c-6de255e77675 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 842.162805] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c0ad70a2-84bd-4242-820a-0eb4395dd3d8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.171702] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Waiting for the task: (returnval){ [ 842.171702] env[69367]: value = "task-4234046" [ 842.171702] env[69367]: _type = "Task" [ 842.171702] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.181430] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234046, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.212763] env[69367]: DEBUG oslo_vmware.api [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': task-4234039, 'name': CreateSnapshot_Task, 'duration_secs': 1.070432} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.214104] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Created Snapshot of the VM instance {{(pid=69367) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 842.215937] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93cfa17-bb30-4172-a4aa-2ce01602fb4d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.374818] env[69367]: DEBUG nova.network.neutron [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Successfully created port: 8ae37b9e-8ee4-4b68-b70f-46238b3bc14e {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 842.400402] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234043, 'name': CreateVM_Task, 'duration_secs': 0.501165} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.400597] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 842.401334] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.401532] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.401853] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 842.402176] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1805936f-178d-4d27-ae32-76be201aafef {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.407446] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 842.407446] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52c23661-2730-fe8a-77b8-445b9869592f" [ 842.407446] env[69367]: _type = "Task" [ 842.407446] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.417627] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52c23661-2730-fe8a-77b8-445b9869592f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.468333] env[69367]: DEBUG nova.compute.manager [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Start building block device mappings for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 842.500704] env[69367]: DEBUG oslo_vmware.api [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234041, 'name': PowerOnVM_Task, 'duration_secs': 0.541447} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.501507] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 842.501846] env[69367]: INFO nova.compute.manager [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Took 14.39 seconds to spawn the instance on the hypervisor. [ 842.502387] env[69367]: DEBUG nova.compute.manager [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 842.504341] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-838ba783-eeeb-40ef-b53f-dfecd923c061 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.556308] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234044, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095918} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.556672] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 842.557567] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a5dbd99-2a91-45de-aeaf-6a6cc522ba0a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.586550] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] 4a46d003-f57e-4089-aa60-757a4246f071/4a46d003-f57e-4089-aa60-757a4246f071.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 842.589561] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d87811f7-0ad4-4632-b756-31bd47d6f898 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.610940] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 842.610940] env[69367]: value = "task-4234047" [ 842.610940] env[69367]: _type = "Task" [ 
842.610940] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.619568] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234047, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.660168] env[69367]: DEBUG nova.network.neutron [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 842.685188] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234046, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.390523} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.685513] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 842.685699] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 842.685875] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 842.738847] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Creating linked-clone VM from snapshot {{(pid=69367) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 842.739693] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5974350a-5078-417b-bcd6-17705a3c135a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.555737] env[69367]: INFO nova.compute.manager [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Took 42.19 seconds to build instance. 
[ 843.563784] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52c23661-2730-fe8a-77b8-445b9869592f, 'name': SearchDatastore_Task, 'duration_secs': 0.074091} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.563975] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234047, 'name': ReconfigVM_Task, 'duration_secs': 0.679654} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.565411] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 843.565662] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 843.565911] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.566080] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 843.566266] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 843.566562] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Reconfigured VM instance instance-0000004b to attach disk [datastore2] 4a46d003-f57e-4089-aa60-757a4246f071/4a46d003-f57e-4089-aa60-757a4246f071.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 843.567262] env[69367]: DEBUG 
oslo_vmware.api [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Waiting for the task: (returnval){ [ 843.567262] env[69367]: value = "task-4234048" [ 843.567262] env[69367]: _type = "Task" [ 843.567262] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.570035] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90dba177-56d9-4566-8a41-a28c44f870fd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.572114] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-78b8bee1-c38a-4935-a5d1-135cf9a7e0fe {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.584746] env[69367]: DEBUG oslo_vmware.api [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': task-4234048, 'name': CloneVM_Task} progress is 10%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.586760] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 843.586760] env[69367]: value = "task-4234049" [ 843.586760] env[69367]: _type = "Task" [ 843.586760] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.588152] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 843.588344] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 843.592017] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-224ac8ea-b4e4-4f05-929c-d981de20d52c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.604023] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 843.604023] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]5283da47-1404-c1e4-d7dd-6f412d5c9af3" [ 843.604023] env[69367]: _type = "Task" [ 843.604023] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.608184] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234049, 'name': Rename_Task} progress is 14%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.620529] env[69367]: DEBUG nova.compute.manager [req-9a8d4b60-4591-4e9a-b3e7-dd3560df4646 req-1b57f1e6-55e6-41a8-a57a-7788d86a978a service nova] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Received event network-changed-2217ec6c-a6da-4c26-b9de-53239e598080 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 843.620850] env[69367]: DEBUG nova.compute.manager [req-9a8d4b60-4591-4e9a-b3e7-dd3560df4646 req-1b57f1e6-55e6-41a8-a57a-7788d86a978a service nova] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Refreshing instance network info cache due to event network-changed-2217ec6c-a6da-4c26-b9de-53239e598080. {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 843.621152] env[69367]: DEBUG oslo_concurrency.lockutils [req-9a8d4b60-4591-4e9a-b3e7-dd3560df4646 req-1b57f1e6-55e6-41a8-a57a-7788d86a978a service nova] Acquiring lock "refresh_cache-54a1f586-481d-427e-ba0b-be90e5573bd3" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.625754] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5283da47-1404-c1e4-d7dd-6f412d5c9af3, 'name': SearchDatastore_Task, 'duration_secs': 0.014848} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.626580] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76f99652-1464-47fb-9b1c-8ede7feeb3d1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.635925] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 843.635925] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]5260eaf7-599b-719a-12ce-437099e2d4cc" [ 843.635925] env[69367]: _type = "Task" [ 843.635925] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.645911] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5260eaf7-599b-719a-12ce-437099e2d4cc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.673661] env[69367]: DEBUG nova.network.neutron [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Updating instance_info_cache with network_info: [{"id": "2217ec6c-a6da-4c26-b9de-53239e598080", "address": "fa:16:3e:ee:cb:24", "network": {"id": "55efdf13-76a4-4190-9a6e-e890b3e8cc39", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1839781723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfbdfc3a96db40c8a3e14c797422f08e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4", "external-id": "nsx-vlan-transportzone-545", "segmentation_id": 545, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2217ec6c-a6", "ovs_interfaceid": "2217ec6c-a6da-4c26-b9de-53239e598080", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.797025] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f269ac-530c-403e-bf09-ef9906d5c851 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.805348] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f52ab84e-2e74-42e7-85e3-d1e099577d07 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.840253] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17d3ce9-ac63-4ef4-9346-760ba1f89d85 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.848850] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b0d8c01-740b-4e4f-953d-20c27be1cb45 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.863918] env[69367]: DEBUG nova.compute.provider_tree [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 844.037699] env[69367]: DEBUG nova.network.neutron [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Successfully updated port: 8ae37b9e-8ee4-4b68-b70f-46238b3bc14e {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 
844.047710] env[69367]: DEBUG nova.compute.manager [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Start spawning the instance on the hypervisor. {{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 844.058112] env[69367]: DEBUG oslo_concurrency.lockutils [None req-361d6c1a-76ca-4e60-9627-03c3bebad406 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "48470f96-56d2-4ca2-8078-c5ff4f6db71b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.960s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 844.084130] env[69367]: DEBUG nova.virt.hardware [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 844.084523] env[69367]: DEBUG nova.virt.hardware [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 844.084800] env[69367]: DEBUG nova.virt.hardware [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 844.085109] env[69367]: DEBUG nova.virt.hardware [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 844.085409] env[69367]: DEBUG nova.virt.hardware [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 844.085596] env[69367]: DEBUG nova.virt.hardware [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 844.085915] env[69367]: DEBUG 
nova.virt.hardware [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 844.086195] env[69367]: DEBUG nova.virt.hardware [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 844.086443] env[69367]: DEBUG nova.virt.hardware [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 844.086668] env[69367]: DEBUG nova.virt.hardware [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 844.087402] env[69367]: DEBUG nova.virt.hardware [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 844.090216] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80bad780-520e-4518-b3c8-e5b967011e8e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.103177] env[69367]: DEBUG oslo_vmware.api [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': task-4234048, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.105778] env[69367]: DEBUG nova.virt.hardware [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 844.106085] env[69367]: DEBUG nova.virt.hardware [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 844.106279] env[69367]: DEBUG nova.virt.hardware [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 844.106506] env[69367]: DEBUG nova.virt.hardware [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 844.106698] env[69367]: DEBUG nova.virt.hardware [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 844.106877] env[69367]: DEBUG nova.virt.hardware [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 844.107157] env[69367]: DEBUG nova.virt.hardware [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 844.107385] env[69367]: DEBUG nova.virt.hardware [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 844.107665] env[69367]: DEBUG 
nova.virt.hardware [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 844.107910] env[69367]: DEBUG nova.virt.hardware [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 844.108283] env[69367]: DEBUG nova.virt.hardware [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 844.110147] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00bb7220-335e-4a78-ae8f-4b4ab13f88a5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.120166] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234049, 'name': Rename_Task, 'duration_secs': 0.320713} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.124022] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a5fb6f9-4279-4305-abfc-ca976f49cda8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.126646] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 844.129371] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a9f6a2be-5966-4f2f-9d3b-7881dcf509d3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.133152] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96107d7c-b4a6-4b4b-a2e4-2bb36d7b2788 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.154317] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 844.154317] env[69367]: value = "task-4234050" [ 844.154317] env[69367]: _type = "Task" [ 844.154317] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.164268] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Instance VIF info [] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 844.170484] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 844.182583] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 844.182583] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5260eaf7-599b-719a-12ce-437099e2d4cc, 'name': SearchDatastore_Task, 'duration_secs': 0.011434} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.182852] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Releasing lock "refresh_cache-54a1f586-481d-427e-ba0b-be90e5573bd3" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 844.183035] env[69367]: DEBUG nova.compute.manager [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Instance network_info: |[{"id": "2217ec6c-a6da-4c26-b9de-53239e598080", "address": "fa:16:3e:ee:cb:24", "network": {"id": "55efdf13-76a4-4190-9a6e-e890b3e8cc39", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1839781723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfbdfc3a96db40c8a3e14c797422f08e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4", "external-id": "nsx-vlan-transportzone-545", "segmentation_id": 545, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2217ec6c-a6", "ovs_interfaceid": "2217ec6c-a6da-4c26-b9de-53239e598080", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 844.183701] env[69367]: DEBUG 
oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e843bcb1-c36e-40c6-9e83-586a237530d1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.197602] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 844.197899] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] 46b6bc45-57f0-4850-9249-6bbb22b162c6/46b6bc45-57f0-4850-9249-6bbb22b162c6.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 844.198255] env[69367]: DEBUG oslo_concurrency.lockutils [req-9a8d4b60-4591-4e9a-b3e7-dd3560df4646 req-1b57f1e6-55e6-41a8-a57a-7788d86a978a service nova] Acquired lock "refresh_cache-54a1f586-481d-427e-ba0b-be90e5573bd3" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 844.198438] env[69367]: DEBUG nova.network.neutron [req-9a8d4b60-4591-4e9a-b3e7-dd3560df4646 req-1b57f1e6-55e6-41a8-a57a-7788d86a978a service nova] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Refreshing network info cache for port 2217ec6c-a6da-4c26-b9de-53239e598080 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 844.200149] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:cb:24', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2217ec6c-a6da-4c26-b9de-53239e598080', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 844.207592] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 844.208381] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6f244121-ab78-4e55-9c5f-506789bf73c0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.217751] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 844.218619] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234050, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.219248] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-436d80b7-75d1-4fa8-933f-5a01299fc587 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.237411] env[69367]: DEBUG oslo_concurrency.lockutils [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquiring lock "48470f96-56d2-4ca2-8078-c5ff4f6db71b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 844.237545] env[69367]: DEBUG oslo_concurrency.lockutils [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "48470f96-56d2-4ca2-8078-c5ff4f6db71b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.238641] env[69367]: DEBUG oslo_concurrency.lockutils [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquiring lock "48470f96-56d2-4ca2-8078-c5ff4f6db71b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 844.238641] env[69367]: DEBUG oslo_concurrency.lockutils [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "48470f96-56d2-4ca2-8078-c5ff4f6db71b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.238641] env[69367]: DEBUG oslo_concurrency.lockutils [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "48470f96-56d2-4ca2-8078-c5ff4f6db71b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 844.239802] env[69367]: DEBUG 
oslo_vmware.api [-] Waiting for the task: (returnval){ [ 844.239802] env[69367]: value = "task-4234051" [ 844.239802] env[69367]: _type = "Task" [ 844.239802] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.242034] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 844.242034] env[69367]: value = "task-4234052" [ 844.242034] env[69367]: _type = "Task" [ 844.242034] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.242575] env[69367]: INFO nova.compute.manager [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Terminating instance [ 844.252663] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 844.252663] env[69367]: value = "task-4234053" [ 844.252663] env[69367]: _type = "Task" [ 844.252663] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.264871] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234051, 'name': CreateVM_Task} progress is 10%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.265209] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234052, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.270948] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234053, 'name': CreateVM_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.367888] env[69367]: DEBUG nova.scheduler.client.report [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 844.507314] env[69367]: DEBUG nova.network.neutron [req-9a8d4b60-4591-4e9a-b3e7-dd3560df4646 req-1b57f1e6-55e6-41a8-a57a-7788d86a978a service nova] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Updated VIF entry in instance network info cache for port 2217ec6c-a6da-4c26-b9de-53239e598080. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 844.507915] env[69367]: DEBUG nova.network.neutron [req-9a8d4b60-4591-4e9a-b3e7-dd3560df4646 req-1b57f1e6-55e6-41a8-a57a-7788d86a978a service nova] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Updating instance_info_cache with network_info: [{"id": "2217ec6c-a6da-4c26-b9de-53239e598080", "address": "fa:16:3e:ee:cb:24", "network": {"id": "55efdf13-76a4-4190-9a6e-e890b3e8cc39", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1839781723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfbdfc3a96db40c8a3e14c797422f08e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4", "external-id": "nsx-vlan-transportzone-545", "segmentation_id": 545, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2217ec6c-a6", "ovs_interfaceid": "2217ec6c-a6da-4c26-b9de-53239e598080", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.540818] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Acquiring lock "refresh_cache-d900df05-b65c-4a45-94d1-563afbf9c022" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.541122] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Acquired lock "refresh_cache-d900df05-b65c-4a45-94d1-563afbf9c022" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 844.541398] env[69367]: DEBUG nova.network.neutron [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 844.561564] env[69367]: DEBUG nova.compute.manager [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 844.588513] env[69367]: DEBUG oslo_vmware.api [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': task-4234048, 'name': CloneVM_Task, 'duration_secs': 1.023181} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.588866] env[69367]: INFO nova.virt.vmwareapi.vmops [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Created linked-clone VM from snapshot [ 844.590157] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6f8252d-fa7d-44bb-ab5f-564e67adf6c0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.602755] env[69367]: DEBUG nova.virt.vmwareapi.images [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Uploading image 57f6198a-1bce-4321-9d01-6d55899490ca {{(pid=69367) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 844.634998] env[69367]: DEBUG oslo_vmware.rw_handles [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 844.634998] env[69367]: value = "vm-837742" [ 844.634998] env[69367]: _type = "VirtualMachine" [ 844.634998] env[69367]: }. {{(pid=69367) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 844.635326] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-69e59ea3-9b1d-45f1-b31b-66191cf6b529 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.645383] env[69367]: DEBUG oslo_vmware.rw_handles [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lease: (returnval){ [ 844.645383] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52f270a7-ce9b-8327-03b7-78ff51629869" [ 844.645383] env[69367]: _type = "HttpNfcLease" [ 844.645383] env[69367]: } obtained for exporting VM: (result){ [ 844.645383] env[69367]: value = "vm-837742" [ 844.645383] env[69367]: _type = "VirtualMachine" [ 844.645383] env[69367]: }. {{(pid=69367) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 844.645671] env[69367]: DEBUG oslo_vmware.api [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Waiting for the lease: (returnval){ [ 844.645671] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52f270a7-ce9b-8327-03b7-78ff51629869" [ 844.645671] env[69367]: _type = "HttpNfcLease" [ 844.645671] env[69367]: } to be ready. {{(pid=69367) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 844.655465] env[69367]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 844.655465] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52f270a7-ce9b-8327-03b7-78ff51629869" [ 844.655465] env[69367]: _type = "HttpNfcLease" [ 844.655465] env[69367]: } is initializing. 
{{(pid=69367) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 844.675198] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234050, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.755732] env[69367]: DEBUG nova.compute.manager [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Start destroying the instance on the hypervisor. {{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 844.755957] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 844.757319] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-810f412a-170a-4e00-a2fc-8e791c7ed5f3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.769761] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234051, 'name': CreateVM_Task, 'duration_secs': 0.301477} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.774864] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 844.775308] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234052, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.775878] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.776057] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 844.776417] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 844.776717] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d844af7a-1741-492e-8415-29007fa4c4ab {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.781477] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 844.785645] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6c910e4d-8f2c-45da-8cc9-82aa6072268d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.787580] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234053, 'name': CreateVM_Task} progress is 25%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.789268] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Waiting for the task: (returnval){ [ 844.789268] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]5221598d-8f89-356e-5715-3b21c1dc3d4f" [ 844.789268] env[69367]: _type = "Task" [ 844.789268] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.796341] env[69367]: DEBUG oslo_vmware.api [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Waiting for the task: (returnval){ [ 844.796341] env[69367]: value = "task-4234055" [ 844.796341] env[69367]: _type = "Task" [ 844.796341] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.799836] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5221598d-8f89-356e-5715-3b21c1dc3d4f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.810169] env[69367]: DEBUG oslo_vmware.api [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234055, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.874469] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.425s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 844.875120] env[69367]: DEBUG nova.compute.manager [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Start building networks asynchronously for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 844.878465] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.848s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.878746] env[69367]: DEBUG nova.objects.instance [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lazy-loading 'resources' on Instance uuid 837b4093-308b-440b-940d-fc0227a5c590 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 845.011985] env[69367]: DEBUG oslo_concurrency.lockutils [req-9a8d4b60-4591-4e9a-b3e7-dd3560df4646 req-1b57f1e6-55e6-41a8-a57a-7788d86a978a service nova] Releasing lock "refresh_cache-54a1f586-481d-427e-ba0b-be90e5573bd3" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 845.086891] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 845.104988] env[69367]: DEBUG nova.network.neutron [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 845.158840] env[69367]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 845.158840] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52f270a7-ce9b-8327-03b7-78ff51629869" [ 845.158840] env[69367]: _type = "HttpNfcLease" [ 845.158840] env[69367]: } is ready. {{(pid=69367) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 845.159166] env[69367]: DEBUG oslo_vmware.rw_handles [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 845.159166] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52f270a7-ce9b-8327-03b7-78ff51629869" [ 845.159166] env[69367]: _type = "HttpNfcLease" [ 845.159166] env[69367]: }. {{(pid=69367) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 845.159907] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4292c20e-ad03-4fc3-abca-a8126a422930 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.170710] env[69367]: DEBUG oslo_vmware.rw_handles [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ac71ee-68b4-2443-2721-639862639e70/disk-0.vmdk from lease info. {{(pid=69367) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 845.170898] env[69367]: DEBUG oslo_vmware.rw_handles [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ac71ee-68b4-2443-2721-639862639e70/disk-0.vmdk for reading. {{(pid=69367) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 845.178847] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234050, 'name': PowerOnVM_Task, 'duration_secs': 0.65286} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.229289] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 845.229513] env[69367]: INFO nova.compute.manager [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Took 9.61 seconds to spawn the instance on the hypervisor. 
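[editor's note] The records above show the export flow polling a CloneVM_Task and then an HttpNfcLease ("is initializing" -> "is ready") before reading the VMDK URL from the lease. The following is a minimal illustrative sketch of that poll-until-ready pattern in plain Python; the helper name, the state strings, and the callback are assumptions for illustration only, not the real oslo.vmware interfaces.

    # Illustrative sketch only: a generic poll-until-ready loop mirroring the
    # lease/task polling visible in the log. Names and state strings are
    # hypothetical, not the oslo.vmware API.
    import time

    def poll_until_ready(get_state, interval=0.5, timeout=60.0):
        """Poll get_state() until it returns 'ready'; raise on error/timeout."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state = get_state()
            if state == "ready":
                return
            if state == "error":
                raise RuntimeError("lease/task entered error state")
            time.sleep(interval)  # e.g. still 'initializing': keep waiting
        raise TimeoutError("lease/task did not become ready in time")

Once the lease is ready, the flow in the log reads the disk URL from the lease info and opens it for reading, which is what the subsequent "Found VMDK URL" and "Opening URL" records show.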
[ 845.229701] env[69367]: DEBUG nova.compute.manager [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 845.233400] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d2ab23-5285-4370-bb71-d531be97b4d9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.261026] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234052, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.586288} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.261026] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] 46b6bc45-57f0-4850-9249-6bbb22b162c6/46b6bc45-57f0-4850-9249-6bbb22b162c6.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 845.261026] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 845.261026] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c391148c-61af-4036-a133-7f627917742d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.268541] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234053, 'name': CreateVM_Task, 'duration_secs': 0.695847} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.269701] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 845.270051] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 845.270051] env[69367]: value = "task-4234056" [ 845.270051] env[69367]: _type = "Task" [ 845.270051] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.270684] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.279113] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234056, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.307048] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5221598d-8f89-356e-5715-3b21c1dc3d4f, 'name': SearchDatastore_Task, 'duration_secs': 0.023885} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.307684] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 845.307956] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 845.308290] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.308405] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.308571] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 845.308857] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 
tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.309180] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 845.309479] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3f81648f-5807-4f48-a5df-6155d1d3fa22 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.314278] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba46d0c5-527d-4624-bf30-7bacb8e69ebc {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.316416] env[69367]: DEBUG oslo_vmware.api [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234055, 'name': PowerOffVM_Task, 'duration_secs': 0.270513} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.320037] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 845.320037] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 845.320037] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1ca4ae08-a955-4063-8d42-15d8cec3e69c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.321645] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 845.321645] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]521005fb-8ded-2e33-45a5-71c572db80e8" [ 845.321645] env[69367]: _type = "Task" [ 845.321645] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.326443] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 845.326659] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 845.327769] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72879b36-3013-4d02-9975-dff6743b1207 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.333460] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]521005fb-8ded-2e33-45a5-71c572db80e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.337275] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Waiting for the task: (returnval){ [ 845.337275] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]524fae8f-2d74-5663-d794-3f9e49c8afb2" [ 845.337275] env[69367]: _type = "Task" [ 845.337275] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.346137] env[69367]: DEBUG nova.network.neutron [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Updating instance_info_cache with network_info: [{"id": "8ae37b9e-8ee4-4b68-b70f-46238b3bc14e", "address": "fa:16:3e:7f:17:b0", "network": {"id": "607608fe-f8da-45d8-b306-79465fb60ea3", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1667013026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "294b002e61984ba1a746b51c95e2af4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1da5fc2-0280-4f76-ac97-20ea4bc7bb16", "external-id": "nsx-vlan-transportzone-563", "segmentation_id": 563, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ae37b9e-8e", "ovs_interfaceid": "8ae37b9e-8ee4-4b68-b70f-46238b3bc14e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.352310] env[69367]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-cef7f93d-67c9-4d31-806d-3cebdd97d3c8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.354243] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]524fae8f-2d74-5663-d794-3f9e49c8afb2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.382803] env[69367]: DEBUG nova.compute.utils [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 845.387640] env[69367]: DEBUG nova.compute.manager [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Allocating IP information in the background. 
{{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 845.388130] env[69367]: DEBUG nova.network.neutron [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 845.432701] env[69367]: DEBUG nova.policy [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '673f294dc4ca43e2bda6f25869c81239', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a1e0deb2e3174445993c4b644b0e6dab', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 845.460941] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 845.461331] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 845.461653] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Deleting the datastore file [datastore2] 48470f96-56d2-4ca2-8078-c5ff4f6db71b {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 845.461971] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e351ab19-5cb9-4f8c-ac31-f7731a5d5f9e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.473133] env[69367]: DEBUG oslo_vmware.api [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Waiting for the task: (returnval){ [ 845.473133] env[69367]: value = "task-4234058" [ 845.473133] env[69367]: _type = "Task" [ 845.473133] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.483479] env[69367]: DEBUG oslo_vmware.api [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234058, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.645991] env[69367]: DEBUG nova.compute.manager [req-ee58bd16-d14a-445e-838d-2443202590bd req-5c3fb3d9-3dbb-4501-b5fc-70b4e919488e service nova] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Received event network-vif-plugged-8ae37b9e-8ee4-4b68-b70f-46238b3bc14e {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 845.646315] env[69367]: DEBUG oslo_concurrency.lockutils [req-ee58bd16-d14a-445e-838d-2443202590bd req-5c3fb3d9-3dbb-4501-b5fc-70b4e919488e service nova] Acquiring lock "d900df05-b65c-4a45-94d1-563afbf9c022-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 845.646435] env[69367]: DEBUG oslo_concurrency.lockutils [req-ee58bd16-d14a-445e-838d-2443202590bd req-5c3fb3d9-3dbb-4501-b5fc-70b4e919488e service nova] Lock "d900df05-b65c-4a45-94d1-563afbf9c022-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 845.646626] env[69367]: DEBUG oslo_concurrency.lockutils [req-ee58bd16-d14a-445e-838d-2443202590bd req-5c3fb3d9-3dbb-4501-b5fc-70b4e919488e service nova] Lock "d900df05-b65c-4a45-94d1-563afbf9c022-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 845.646765] env[69367]: DEBUG nova.compute.manager [req-ee58bd16-d14a-445e-838d-2443202590bd req-5c3fb3d9-3dbb-4501-b5fc-70b4e919488e service nova] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] No waiting events found dispatching network-vif-plugged-8ae37b9e-8ee4-4b68-b70f-46238b3bc14e {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 845.647796] env[69367]: WARNING nova.compute.manager [req-ee58bd16-d14a-445e-838d-2443202590bd req-5c3fb3d9-3dbb-4501-b5fc-70b4e919488e service nova] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Received unexpected event network-vif-plugged-8ae37b9e-8ee4-4b68-b70f-46238b3bc14e for instance with vm_state building and task_state spawning. [ 845.647796] env[69367]: DEBUG nova.compute.manager [req-ee58bd16-d14a-445e-838d-2443202590bd req-5c3fb3d9-3dbb-4501-b5fc-70b4e919488e service nova] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Received event network-changed-8ae37b9e-8ee4-4b68-b70f-46238b3bc14e {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 845.647796] env[69367]: DEBUG nova.compute.manager [req-ee58bd16-d14a-445e-838d-2443202590bd req-5c3fb3d9-3dbb-4501-b5fc-70b4e919488e service nova] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Refreshing instance network info cache due to event network-changed-8ae37b9e-8ee4-4b68-b70f-46238b3bc14e. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 845.647796] env[69367]: DEBUG oslo_concurrency.lockutils [req-ee58bd16-d14a-445e-838d-2443202590bd req-5c3fb3d9-3dbb-4501-b5fc-70b4e919488e service nova] Acquiring lock "refresh_cache-d900df05-b65c-4a45-94d1-563afbf9c022" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.760047] env[69367]: INFO nova.compute.manager [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Took 34.02 seconds to build instance. [ 845.786687] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234056, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.245641} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.790056] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 845.791115] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a90a7c68-6006-4f9b-9a45-69a9da020140 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.795490] env[69367]: DEBUG nova.network.neutron [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Successfully created port: 783fd02a-aef2-4f21-aebf-723120f844c2 {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 845.826630] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 46b6bc45-57f0-4850-9249-6bbb22b162c6/46b6bc45-57f0-4850-9249-6bbb22b162c6.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 845.830311] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a567f7c-22bf-4f37-bd33-c5e5d0b1007a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.855377] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Releasing lock "refresh_cache-d900df05-b65c-4a45-94d1-563afbf9c022" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 845.855847] env[69367]: DEBUG nova.compute.manager [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] 
[instance: d900df05-b65c-4a45-94d1-563afbf9c022] Instance network_info: |[{"id": "8ae37b9e-8ee4-4b68-b70f-46238b3bc14e", "address": "fa:16:3e:7f:17:b0", "network": {"id": "607608fe-f8da-45d8-b306-79465fb60ea3", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1667013026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "294b002e61984ba1a746b51c95e2af4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1da5fc2-0280-4f76-ac97-20ea4bc7bb16", "external-id": "nsx-vlan-transportzone-563", "segmentation_id": 563, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ae37b9e-8e", "ovs_interfaceid": "8ae37b9e-8ee4-4b68-b70f-46238b3bc14e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 845.856952] env[69367]: DEBUG oslo_concurrency.lockutils [req-ee58bd16-d14a-445e-838d-2443202590bd req-5c3fb3d9-3dbb-4501-b5fc-70b4e919488e service nova] Acquired lock "refresh_cache-d900df05-b65c-4a45-94d1-563afbf9c022" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.856952] env[69367]: DEBUG nova.network.neutron [req-ee58bd16-d14a-445e-838d-2443202590bd req-5c3fb3d9-3dbb-4501-b5fc-70b4e919488e service nova] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Refreshing network info cache for port 8ae37b9e-8ee4-4b68-b70f-46238b3bc14e {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 845.859025] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:17:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd1da5fc2-0280-4f76-ac97-20ea4bc7bb16', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8ae37b9e-8ee4-4b68-b70f-46238b3bc14e', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 845.867783] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Creating folder: Project (294b002e61984ba1a746b51c95e2af4c). Parent ref: group-v837645. 
{{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 845.884595] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6d6a06a5-dcbd-4867-9132-78189037ab18 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.886763] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]524fae8f-2d74-5663-d794-3f9e49c8afb2, 'name': SearchDatastore_Task, 'duration_secs': 0.010581} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.887445] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]521005fb-8ded-2e33-45a5-71c572db80e8, 'name': SearchDatastore_Task, 'duration_secs': 0.011111} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.887822] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 845.887822] env[69367]: value = "task-4234059" [ 845.887822] env[69367]: _type = "Task" [ 845.887822] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.889393] env[69367]: DEBUG nova.compute.manager [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Start building block device mappings for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 845.892489] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 845.892872] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 845.893218] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.894742] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfdef940-7099-4ec8-9918-922a355ed471 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.903991] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd11190-73a1-4b31-b7b5-a07ac651f832 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.912470] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Waiting for the task: (returnval){ [ 845.912470] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]528753ed-29d2-d9bb-25ef-35831231d3e8" [ 845.912470] env[69367]: _type = "Task" [ 845.912470] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.919754] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234059, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.925604] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fcf8405-47f5-48e6-8957-17c00240fbe7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.932191] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Created folder: Project (294b002e61984ba1a746b51c95e2af4c) in parent group-v837645. 
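[editor's note] Throughout this section oslo.concurrency's lockutils reports how long each lock was waited for and then held (e.g. "waited 21.848s", "held 3.425s"). The sketch below reproduces that wait/hold accounting with a plain context manager; timed_lock is a hypothetical helper written for illustration, not the lockutils implementation itself.

    # Illustrative sketch: a lock wrapper reporting wait/hold times in the
    # same spirit as the lockutils messages above. Not oslo.concurrency code.
    import threading
    import time
    from contextlib import contextmanager

    @contextmanager
    def timed_lock(lock: threading.Lock, name: str):
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
        try:
            yield
        finally:
            held = time.monotonic() - t0 - waited
            lock.release()
            print(f'Lock "{name}" released :: held {held:.3f}s')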
[ 845.932594] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Creating folder: Instances. Parent ref: group-v837745. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 845.933431] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f24ae6e2-3f8f-41c6-9c02-ecb5dc6d37a2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.944192] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]528753ed-29d2-d9bb-25ef-35831231d3e8, 'name': SearchDatastore_Task, 'duration_secs': 0.013962} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.974392] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 845.975115] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] c272b0ae-6313-46ab-977c-6de255e77675/c272b0ae-6313-46ab-977c-6de255e77675.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 845.977500] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.977796] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 845.978129] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fce5eafa-6038-4c66-8e9e-3f3af5003ce7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.984465] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-391cd019-16d8-49ad-af82-3f27da275ddd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.987262] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 
tempest-ServersNegativeTestJSON-19801240-project-member] Created folder: Instances in parent group-v837745. [ 845.987631] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 845.988230] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d15793a4-0993-4abc-adfa-e73ed0beb5bc {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.990422] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 845.991960] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d1ccae81-474a-46ce-86ba-efd3a93d6663 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.015647] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Waiting for the task: (returnval){ [ 846.015647] env[69367]: value = "task-4234062" [ 846.015647] env[69367]: _type = "Task" [ 846.015647] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.015985] env[69367]: DEBUG oslo_vmware.api [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234058, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.225385} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.019589] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e788381f-5285-4c43-ab36-c09825680a8f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.023936] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 846.024292] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 846.024558] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 846.024806] env[69367]: INFO nova.compute.manager [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Took 1.27 seconds to destroy the instance on the hypervisor. [ 846.025119] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 846.032059] env[69367]: DEBUG nova.compute.manager [-] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 846.032371] env[69367]: DEBUG nova.network.neutron [-] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 846.034262] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 846.034492] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 846.036283] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1603be7f-096b-49e2-84c3-4367f75dc844 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.049825] env[69367]: DEBUG nova.compute.provider_tree [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 846.058453] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 846.058453] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52198c07-1941-1ea1-3f59-ed2d613fe053" [ 846.058453] env[69367]: _type = "Task" [ 846.058453] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.058760] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234062, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.058917] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 846.058917] env[69367]: value = "task-4234063" [ 846.058917] env[69367]: _type = "Task" [ 846.058917] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.078838] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52198c07-1941-1ea1-3f59-ed2d613fe053, 'name': SearchDatastore_Task, 'duration_secs': 0.01272} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.084866] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234063, 'name': CreateVM_Task} progress is 6%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.085081] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f66da1c-eee9-4ef1-96ca-5dad20875a0f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.094724] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 846.094724] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]5253e01f-7fff-2e9f-37e2-8d6ad98d929a" [ 846.094724] env[69367]: _type = "Task" [ 846.094724] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.106159] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5253e01f-7fff-2e9f-37e2-8d6ad98d929a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.263967] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "4a46d003-f57e-4089-aa60-757a4246f071" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.186s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 846.379342] env[69367]: DEBUG nova.network.neutron [req-ee58bd16-d14a-445e-838d-2443202590bd req-5c3fb3d9-3dbb-4501-b5fc-70b4e919488e service nova] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Updated VIF entry in instance network info cache for port 8ae37b9e-8ee4-4b68-b70f-46238b3bc14e. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 846.379342] env[69367]: DEBUG nova.network.neutron [req-ee58bd16-d14a-445e-838d-2443202590bd req-5c3fb3d9-3dbb-4501-b5fc-70b4e919488e service nova] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Updating instance_info_cache with network_info: [{"id": "8ae37b9e-8ee4-4b68-b70f-46238b3bc14e", "address": "fa:16:3e:7f:17:b0", "network": {"id": "607608fe-f8da-45d8-b306-79465fb60ea3", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1667013026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "294b002e61984ba1a746b51c95e2af4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1da5fc2-0280-4f76-ac97-20ea4bc7bb16", "external-id": "nsx-vlan-transportzone-563", "segmentation_id": 563, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ae37b9e-8e", "ovs_interfaceid": "8ae37b9e-8ee4-4b68-b70f-46238b3bc14e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.413946] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234059, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.541174] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234062, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.580157] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234063, 'name': CreateVM_Task} progress is 25%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.589083] env[69367]: ERROR nova.scheduler.client.report [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [req-67f6eb0b-5d25-4df6-97ff-f4f7729079fa] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-67f6eb0b-5d25-4df6-97ff-f4f7729079fa"}]} [ 846.589185] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.711s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 846.590318] env[69367]: ERROR nova.compute.manager [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] Traceback (most recent call last): [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] yield [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] self.set_inventory_for_provider( [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-67f6eb0b-5d25-4df6-97ff-f4f7729079fa"}]} [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] [ 846.590318] env[69367]: ERROR nova.compute.manager 
[instance: 837b4093-308b-440b-940d-fc0227a5c590] During handling of the above exception, another exception occurred: [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] Traceback (most recent call last): [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] self._delete_instance(context, instance, bdms) [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] self._complete_deletion(context, instance) [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] self._update_resource_tracker(context, instance) [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] self.rt.update_usage(context, instance, instance.node) [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] return f(*args, **kwargs) [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] self._update(context.elevated(), self.compute_nodes[nodename]) [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] self._update_to_placement(context, compute_node, startup) [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] return attempt.get(self._wrap_exception) [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] six.reraise(self.value[0], self.value[1], self.value[2]) [ 846.590318] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 846.592559] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] raise value [ 846.592559] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 846.592559] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 846.592559] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 846.592559] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] self.reportclient.update_from_provider_tree( [ 846.592559] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 846.592559] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] with catch_all(pd.uuid): [ 846.592559] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 846.592559] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] self.gen.throw(typ, value, traceback) [ 846.592559] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 846.592559] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] raise exception.ResourceProviderSyncFailed() [ 846.592559] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 846.592559] env[69367]: ERROR nova.compute.manager [instance: 837b4093-308b-440b-940d-fc0227a5c590] [ 846.592888] env[69367]: DEBUG oslo_concurrency.lockutils [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.089s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.597022] env[69367]: DEBUG nova.objects.instance [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Lazy-loading 'resources' on Instance uuid f8c07fa1-d27c-4d0f-847b-481477cd04bf {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 846.614943] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5253e01f-7fff-2e9f-37e2-8d6ad98d929a, 'name': SearchDatastore_Task, 'duration_secs': 0.016101} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.616952] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 846.617397] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] 54a1f586-481d-427e-ba0b-be90e5573bd3/54a1f586-481d-427e-ba0b-be90e5573bd3.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 846.617758] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d374e5d5-7062-410c-a47d-6870b0b1233e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.626449] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 846.626449] env[69367]: value = "task-4234064" [ 846.626449] env[69367]: _type = "Task" [ 846.626449] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.638162] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234064, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.664898] env[69367]: DEBUG nova.compute.manager [req-3c6935b3-f03f-43c4-be2b-f82aaef627a3 req-917eb78c-04f4-4c89-ad85-9e466f2aea91 service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Received event network-vif-deleted-645e6040-4204-42a3-8600-608066c94ade {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 846.665557] env[69367]: INFO nova.compute.manager [req-3c6935b3-f03f-43c4-be2b-f82aaef627a3 req-917eb78c-04f4-4c89-ad85-9e466f2aea91 service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Neutron deleted interface 645e6040-4204-42a3-8600-608066c94ade; detaching it from the instance and deleting it from the info cache [ 846.665754] env[69367]: DEBUG nova.network.neutron [req-3c6935b3-f03f-43c4-be2b-f82aaef627a3 req-917eb78c-04f4-4c89-ad85-9e466f2aea91 service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Updating instance_info_cache with network_info: [{"id": "7485ac3f-c5a1-4b84-a33a-afb79101e2e9", "address": "fa:16:3e:a0:08:3e", "network": {"id": "757a47fe-101f-496c-951e-88755d3ed618", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1957959951", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.111", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68ad9e06b1fb4e5bbad98a14e0c55c60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5291d0-ee0f-4d70-b2ae-ab6879a67b08", "external-id": "nsx-vlan-transportzone-597", "segmentation_id": 597, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7485ac3f-c5", "ovs_interfaceid": "7485ac3f-c5a1-4b84-a33a-afb79101e2e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "19be5951-2642-49af-aaa5-d15bc24f0434", "address": "fa:16:3e:1e:56:8f", "network": {"id": "21e431ac-8ad6-4f28-87c8-85b2890870f8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1989947687", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.51", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "68ad9e06b1fb4e5bbad98a14e0c55c60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2d3bf80-d60a-4b53-a00a-1381de6d4a12", "external-id": "nsx-vlan-transportzone-982", "segmentation_id": 982, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19be5951-26", "ovs_interfaceid": "19be5951-2642-49af-aaa5-d15bc24f0434", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.882782] env[69367]: DEBUG oslo_concurrency.lockutils 
[req-ee58bd16-d14a-445e-838d-2443202590bd req-5c3fb3d9-3dbb-4501-b5fc-70b4e919488e service nova] Releasing lock "refresh_cache-d900df05-b65c-4a45-94d1-563afbf9c022" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 846.907413] env[69367]: DEBUG nova.compute.manager [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Start spawning the instance on the hypervisor. {{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 846.909888] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234059, 'name': ReconfigVM_Task, 'duration_secs': 0.667681} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.910503] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 46b6bc45-57f0-4850-9249-6bbb22b162c6/46b6bc45-57f0-4850-9249-6bbb22b162c6.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 846.911209] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-992b1f46-d179-4d70-a38a-4965d7cb522a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.922326] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 846.922326] env[69367]: value = "task-4234065" [ 846.922326] env[69367]: _type = "Task" [ 846.922326] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.934042] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234065, 'name': Rename_Task} progress is 5%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.944658] env[69367]: DEBUG nova.virt.hardware [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 846.944658] env[69367]: DEBUG nova.virt.hardware [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 846.944960] env[69367]: DEBUG nova.virt.hardware [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 846.945024] env[69367]: DEBUG nova.virt.hardware [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 846.945169] env[69367]: DEBUG nova.virt.hardware [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 846.945408] env[69367]: DEBUG nova.virt.hardware [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 846.945579] env[69367]: DEBUG nova.virt.hardware [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 846.945791] env[69367]: DEBUG nova.virt.hardware [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
846.946050] env[69367]: DEBUG nova.virt.hardware [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 846.946400] env[69367]: DEBUG nova.virt.hardware [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 846.947162] env[69367]: DEBUG nova.virt.hardware [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 846.948043] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb61d36e-d789-470e-ab22-e1540070f63f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.957684] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01615bde-bc87-4a26-8440-1bf0d3f995cc {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.037670] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234062, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.626753} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.038345] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] c272b0ae-6313-46ab-977c-6de255e77675/c272b0ae-6313-46ab-977c-6de255e77675.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 847.038587] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 847.038866] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-07624f95-1f0e-49ad-815f-f581e7ca933e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.047622] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Waiting for the task: (returnval){ [ 847.047622] env[69367]: value = "task-4234066" [ 847.047622] env[69367]: _type = "Task" [ 847.047622] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.058420] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234066, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.074786] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234063, 'name': CreateVM_Task, 'duration_secs': 0.740051} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.075012] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 847.075986] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.076244] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 847.076981] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 847.077430] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bfb4f9c-9e62-4696-a79c-db3afae9924a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.084647] env[69367]: DEBUG oslo_vmware.api [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Waiting for the task: (returnval){ [ 847.084647] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52a472d8-fe89-7dd0-f692-0059eb0736a7" [ 847.084647] env[69367]: _type = "Task" [ 847.084647] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.096093] env[69367]: DEBUG oslo_vmware.api [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52a472d8-fe89-7dd0-f692-0059eb0736a7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.105264] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "837b4093-308b-440b-940d-fc0227a5c590" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.990s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.125639] env[69367]: DEBUG nova.scheduler.client.report [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 847.138745] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234064, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.147807] env[69367]: DEBUG nova.scheduler.client.report [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 847.147807] env[69367]: DEBUG nova.compute.provider_tree [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 847.160044] env[69367]: DEBUG nova.scheduler.client.report [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 847.169685] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4ad880e6-e165-46ac-8d55-543452e7d321 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.179327] env[69367]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fecfa5ce-7a63-421a-9109-428cf104927d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.191536] env[69367]: DEBUG nova.scheduler.client.report [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 847.214469] env[69367]: DEBUG nova.compute.manager [req-3c6935b3-f03f-43c4-be2b-f82aaef627a3 req-917eb78c-04f4-4c89-ad85-9e466f2aea91 service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Detach interface failed, port_id=645e6040-4204-42a3-8600-608066c94ade, reason: Instance 48470f96-56d2-4ca2-8078-c5ff4f6db71b could not be found. {{(pid=69367) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11601}} [ 847.435409] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234065, 'name': Rename_Task, 'duration_secs': 0.32929} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.435939] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 847.436313] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-562df8e0-b3f2-40dd-a1bd-e2d42fef3a03 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.449021] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 847.449021] env[69367]: value = "task-4234067" [ 847.449021] env[69367]: _type = "Task" [ 847.449021] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.457544] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234067, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.470049] env[69367]: DEBUG nova.network.neutron [-] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.566037] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234066, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.106576} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.571205] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 847.571685] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-949bb9ec-5893-4061-b3fc-3c46292295c5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.605797] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] c272b0ae-6313-46ab-977c-6de255e77675/c272b0ae-6313-46ab-977c-6de255e77675.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 847.621181] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5881f21-032b-4257-b191-eae9afbd34f3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.647082] env[69367]: DEBUG oslo_vmware.api [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52a472d8-fe89-7dd0-f692-0059eb0736a7, 'name': SearchDatastore_Task, 'duration_secs': 0.05764} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.650925] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 847.650925] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 847.650925] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.650925] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 847.650925] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 847.651177] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Waiting for the task: (returnval){ [ 847.651177] env[69367]: value = "task-4234068" [ 847.651177] env[69367]: _type = "Task" [ 847.651177] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.654345] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-54d161fe-774f-4470-81df-8ea197b7148a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.664961] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234064, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.53467} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.666387] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] 54a1f586-481d-427e-ba0b-be90e5573bd3/54a1f586-481d-427e-ba0b-be90e5573bd3.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 847.666629] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 847.667618] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bdb5523d-5140-423d-95cf-3fd7ec691672 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.678547] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234068, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.679168] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 847.679168] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 847.681235] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-432f665d-3bfb-4305-b1a3-4f7934c2fb62 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.685246] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66869a40-7cc0-4382-828d-2c439139facb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.688237] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 847.688237] env[69367]: value = "task-4234069" [ 847.688237] env[69367]: _type = "Task" [ 847.688237] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.689277] env[69367]: DEBUG nova.network.neutron [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Successfully updated port: 783fd02a-aef2-4f21-aebf-723120f844c2 {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 847.700528] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5552b7b-570c-416a-8e07-ee7f1f22e184 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.704825] env[69367]: DEBUG oslo_vmware.api [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Waiting for the task: (returnval){ [ 847.704825] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52ce4a1e-c42e-ca7b-bf65-d89e6c849d7c" [ 847.704825] env[69367]: _type = "Task" [ 847.704825] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.709622] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234069, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.742324] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf255b7d-8054-4a9b-b133-ccf771ff18ac {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.749261] env[69367]: DEBUG oslo_vmware.api [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52ce4a1e-c42e-ca7b-bf65-d89e6c849d7c, 'name': SearchDatastore_Task, 'duration_secs': 0.012926} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.750607] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0d650de-b004-4f17-ab84-7a90fdb4d45e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.758940] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504fbcb7-34b0-44c4-9a9c-d05de6f5a579 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.765152] env[69367]: DEBUG oslo_vmware.api [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Waiting for the task: (returnval){ [ 847.765152] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52aa4eda-3996-01d6-9eff-c94e2507b295" [ 847.765152] env[69367]: _type = "Task" [ 847.765152] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.778779] env[69367]: DEBUG nova.compute.provider_tree [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 847.784201] env[69367]: DEBUG nova.compute.manager [req-1d50487c-eb5c-45d8-a1f7-73380fb99869 req-b100c59c-5239-45d9-b5cc-f76a0b16398f service nova] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Received event network-vif-plugged-783fd02a-aef2-4f21-aebf-723120f844c2 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 847.784201] env[69367]: DEBUG oslo_concurrency.lockutils [req-1d50487c-eb5c-45d8-a1f7-73380fb99869 req-b100c59c-5239-45d9-b5cc-f76a0b16398f service nova] Acquiring lock "05aae150-5d86-4210-ae7e-8c63e83cb907-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.784201] env[69367]: DEBUG oslo_concurrency.lockutils [req-1d50487c-eb5c-45d8-a1f7-73380fb99869 req-b100c59c-5239-45d9-b5cc-f76a0b16398f service nova] Lock "05aae150-5d86-4210-ae7e-8c63e83cb907-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.784201] env[69367]: DEBUG oslo_concurrency.lockutils [req-1d50487c-eb5c-45d8-a1f7-73380fb99869 req-b100c59c-5239-45d9-b5cc-f76a0b16398f service nova] Lock "05aae150-5d86-4210-ae7e-8c63e83cb907-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.784201] env[69367]: DEBUG nova.compute.manager [req-1d50487c-eb5c-45d8-a1f7-73380fb99869 req-b100c59c-5239-45d9-b5cc-f76a0b16398f service nova] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] No waiting events found dispatching network-vif-plugged-783fd02a-aef2-4f21-aebf-723120f844c2 {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 847.784201] env[69367]: WARNING nova.compute.manager [req-1d50487c-eb5c-45d8-a1f7-73380fb99869 req-b100c59c-5239-45d9-b5cc-f76a0b16398f service nova] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Received unexpected event network-vif-plugged-783fd02a-aef2-4f21-aebf-723120f844c2 for instance with vm_state building and task_state spawning. [ 847.788797] env[69367]: DEBUG oslo_vmware.api [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52aa4eda-3996-01d6-9eff-c94e2507b295, 'name': SearchDatastore_Task, 'duration_secs': 0.011593} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.789091] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 847.789405] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] d900df05-b65c-4a45-94d1-563afbf9c022/d900df05-b65c-4a45-94d1-563afbf9c022.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 847.789678] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-33e0f5dd-0ecb-4872-83d1-bc98f789d02b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.798644] env[69367]: DEBUG oslo_vmware.api [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Waiting for the task: (returnval){ [ 847.798644] env[69367]: value = "task-4234070" [ 847.798644] env[69367]: _type = "Task" [ 847.798644] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.809558] env[69367]: DEBUG oslo_vmware.api [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Task: {'id': task-4234070, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.959052] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234067, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.971938] env[69367]: INFO nova.compute.manager [-] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Took 1.94 seconds to deallocate network for instance. [ 848.170948] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234068, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.196763] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Acquiring lock "refresh_cache-05aae150-5d86-4210-ae7e-8c63e83cb907" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.196763] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Acquired lock "refresh_cache-05aae150-5d86-4210-ae7e-8c63e83cb907" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 848.196763] env[69367]: DEBUG nova.network.neutron [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 848.206122] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234069, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.146454} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.206914] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 848.207311] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a5a16d3-398d-4868-a4dc-f81517454534 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.235567] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 54a1f586-481d-427e-ba0b-be90e5573bd3/54a1f586-481d-427e-ba0b-be90e5573bd3.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 848.236515] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9738273-43f8-46d6-91c6-97633a9e75f1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.262432] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 848.262432] env[69367]: value = "task-4234071" [ 848.262432] env[69367]: _type = "Task" [ 848.262432] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.276985] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234071, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.310385] env[69367]: ERROR nova.scheduler.client.report [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [req-31a586ea-22c8-4155-ab8b-a077619c2aaf] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-31a586ea-22c8-4155-ab8b-a077619c2aaf"}]} [ 848.311025] env[69367]: DEBUG oslo_concurrency.lockutils [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.718s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.312121] env[69367]: ERROR nova.compute.manager [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
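The 400 above, and the traceback that follows, come from placement's inventory schema: max_unit for any resource class must be an integer of at least 1, but the compute node reported DISK_GB with max_unit 0, so the PUT to /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories is rejected, set_inventory_for_provider raises ResourceProviderUpdateFailed, and the resource tracker surfaces it as ResourceProviderSyncFailed while completing the instance deletion. A minimal reproduction of the schema check, using only the schema fragment quoted in the error detail (not the full placement inventory schema), might look like this:

    # Illustrative only: reproduce the validation that rejects DISK_GB max_unit = 0.
    # The max_unit constraint is copied from the error detail above; everything
    # else here is a reduced sketch, not the real placement schema.
    import jsonschema

    INVENTORY_SCHEMA = {
        "type": "object",
        "properties": {
            "inventories": {
                "type": "object",
                "patternProperties": {
                    "^[A-Z0-9_]+$": {
                        "type": "object",
                        "properties": {
                            "max_unit": {
                                "type": "integer",
                                "maximum": 2147483647,
                                "minimum": 1,
                            },
                        },
                    },
                },
            },
        },
    }

    payload = {
        "inventories": {
            "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1,
                        "max_unit": 0, "step_size": 1, "allocation_ratio": 1.0},
        },
    }

    try:
        jsonschema.validate(payload, INVENTORY_SCHEMA)
    except jsonschema.ValidationError as exc:
        # Prints "0 is less than the minimum of 1", matching the 400 detail above.
        print(exc.message)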
[ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Traceback (most recent call last): [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] yield [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] self.set_inventory_for_provider( [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-31a586ea-22c8-4155-ab8b-a077619c2aaf"}]} [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] During handling of the above exception, another exception occurred: [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Traceback (most recent call last): [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] self._delete_instance(context, instance, bdms) [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] self._complete_deletion(context, instance) [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] self._update_resource_tracker(context, instance) [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: 
f8c07fa1-d27c-4d0f-847b-481477cd04bf] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] self.rt.update_usage(context, instance, instance.node) [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] return f(*args, **kwargs) [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] self._update(context.elevated(), self.compute_nodes[nodename]) [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] self._update_to_placement(context, compute_node, startup) [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] return attempt.get(self._wrap_exception) [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] six.reraise(self.value[0], self.value[1], self.value[2]) [ 848.312121] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 848.313619] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] raise value [ 848.313619] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 848.313619] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 848.313619] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 848.313619] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] self.reportclient.update_from_provider_tree( [ 848.313619] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 848.313619] 
env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] with catch_all(pd.uuid): [ 848.313619] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 848.313619] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] self.gen.throw(typ, value, traceback) [ 848.313619] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 848.313619] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] raise exception.ResourceProviderSyncFailed() [ 848.313619] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 848.313619] env[69367]: ERROR nova.compute.manager [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] [ 848.316353] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.185s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 848.318869] env[69367]: INFO nova.compute.claims [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 42114002-28e0-408a-862e-547680ed479f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 848.331519] env[69367]: DEBUG oslo_vmware.api [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Task: {'id': task-4234070, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.460884] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234067, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.480072] env[69367]: DEBUG oslo_concurrency.lockutils [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.654330] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.675611] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234068, 'name': ReconfigVM_Task, 'duration_secs': 0.652046} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.675611] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Reconfigured VM instance instance-0000004a to attach disk [datastore1] c272b0ae-6313-46ab-977c-6de255e77675/c272b0ae-6313-46ab-977c-6de255e77675.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 848.679145] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-64939316-818f-4b7a-8483-eaea84961b9e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.695231] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Waiting for the task: (returnval){ [ 848.695231] env[69367]: value = "task-4234072" [ 848.695231] env[69367]: _type = "Task" [ 848.695231] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.711895] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234072, 'name': Rename_Task} progress is 6%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.781673] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234071, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.785062] env[69367]: DEBUG nova.network.neutron [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 848.790046] env[69367]: DEBUG nova.compute.manager [req-31f2735c-1abb-489f-9d9d-9dde1cf86b06 req-b80b27d2-748d-485a-8dae-a1dedd4d4315 service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Received event network-vif-deleted-7485ac3f-c5a1-4b84-a33a-afb79101e2e9 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 848.790448] env[69367]: DEBUG nova.compute.manager [req-31f2735c-1abb-489f-9d9d-9dde1cf86b06 req-b80b27d2-748d-485a-8dae-a1dedd4d4315 service nova] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Received event network-vif-deleted-19be5951-2642-49af-aaa5-d15bc24f0434 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 848.815818] env[69367]: DEBUG oslo_vmware.api [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Task: {'id': task-4234070, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.824932] env[69367]: DEBUG oslo_concurrency.lockutils [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Lock "f8c07fa1-d27c-4d0f-847b-481477cd04bf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.687s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.963469] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234067, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.039747] env[69367]: DEBUG nova.network.neutron [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Updating instance_info_cache with network_info: [{"id": "783fd02a-aef2-4f21-aebf-723120f844c2", "address": "fa:16:3e:9f:8f:0d", "network": {"id": "28525544-bc2b-4838-b793-7438a0bc9915", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1953993669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1e0deb2e3174445993c4b644b0e6dab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1712475b-e1be-49e0-9a18-febd305c90ad", "external-id": "nsx-vlan-transportzone-531", "segmentation_id": 531, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap783fd02a-ae", "ovs_interfaceid": "783fd02a-aef2-4f21-aebf-723120f844c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.210583] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234072, 'name': Rename_Task, 'duration_secs': 0.258822} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.210915] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 849.211192] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6aafb194-13a7-43ee-963f-81322b00beb8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.220049] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Waiting for the task: (returnval){ [ 849.220049] env[69367]: value = "task-4234073" [ 849.220049] env[69367]: _type = "Task" [ 849.220049] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.232894] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234073, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.280025] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234071, 'name': ReconfigVM_Task, 'duration_secs': 0.951187} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.280025] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 54a1f586-481d-427e-ba0b-be90e5573bd3/54a1f586-481d-427e-ba0b-be90e5573bd3.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 849.280025] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4a9729e2-e433-43be-b5d7-d525afe68635 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.287666] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 849.287666] env[69367]: value = "task-4234074" [ 849.287666] env[69367]: _type = "Task" [ 849.287666] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.299193] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234074, 'name': Rename_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.311968] env[69367]: DEBUG oslo_vmware.api [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Task: {'id': task-4234070, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.053369} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.315232] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore1] d900df05-b65c-4a45-94d1-563afbf9c022/d900df05-b65c-4a45-94d1-563afbf9c022.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 849.315232] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 849.315232] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3ddbb5cc-e2a2-4cad-9bf7-24e9fb72f040 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.322632] env[69367]: DEBUG oslo_vmware.api [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Waiting for the task: (returnval){ [ 849.322632] env[69367]: value = "task-4234075" [ 849.322632] env[69367]: _type = "Task" [ 849.322632] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.344286] env[69367]: DEBUG oslo_vmware.api [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Task: {'id': task-4234075, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.361766] env[69367]: DEBUG nova.scheduler.client.report [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 849.384025] env[69367]: DEBUG nova.scheduler.client.report [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 849.384025] env[69367]: DEBUG nova.compute.provider_tree [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 849.398226] env[69367]: DEBUG nova.scheduler.client.report [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 849.419694] env[69367]: DEBUG nova.scheduler.client.report [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 849.463059] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234067, 'name': PowerOnVM_Task, 'duration_secs': 1.90854} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.466094] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 849.466322] env[69367]: INFO nova.compute.manager [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Took 11.25 seconds to spawn the instance on the hypervisor. [ 849.466700] env[69367]: DEBUG nova.compute.manager [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 849.468560] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb8df46-4436-4d0c-9fce-b242faca9438 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.541650] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Releasing lock "refresh_cache-05aae150-5d86-4210-ae7e-8c63e83cb907" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 849.542203] env[69367]: DEBUG nova.compute.manager [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Instance network_info: |[{"id": "783fd02a-aef2-4f21-aebf-723120f844c2", "address": "fa:16:3e:9f:8f:0d", "network": {"id": "28525544-bc2b-4838-b793-7438a0bc9915", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1953993669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1e0deb2e3174445993c4b644b0e6dab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1712475b-e1be-49e0-9a18-febd305c90ad", "external-id": "nsx-vlan-transportzone-531", "segmentation_id": 531, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap783fd02a-ae", "ovs_interfaceid": "783fd02a-aef2-4f21-aebf-723120f844c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 849.542506] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] 
[instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9f:8f:0d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1712475b-e1be-49e0-9a18-febd305c90ad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '783fd02a-aef2-4f21-aebf-723120f844c2', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 849.550048] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Creating folder: Project (a1e0deb2e3174445993c4b644b0e6dab). Parent ref: group-v837645. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 849.553080] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e27620fa-6c8c-4602-b830-b2dfffe811ad {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.567463] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Created folder: Project (a1e0deb2e3174445993c4b644b0e6dab) in parent group-v837645. [ 849.567679] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Creating folder: Instances. Parent ref: group-v837748. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 849.567931] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ceb6c0f6-6bb4-44c1-90e8-05d49a82dc18 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.578584] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Created folder: Instances in parent group-v837748. [ 849.578844] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 849.579050] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 849.579531] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-227c4ee2-af27-42ec-82b9-55cde68aeb8b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.604756] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 849.604756] env[69367]: value = "task-4234078" [ 849.604756] env[69367]: _type = "Task" [ 849.604756] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.614636] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234078, 'name': CreateVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.733267] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234073, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.799657] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234074, 'name': Rename_Task, 'duration_secs': 0.219653} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.800028] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 849.802959] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9d430950-d296-46f5-91f7-cad088daef9d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.811064] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 849.811064] env[69367]: value = "task-4234079" [ 849.811064] env[69367]: _type = "Task" [ 849.811064] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.820568] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234079, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.835547] env[69367]: DEBUG oslo_vmware.api [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Task: {'id': task-4234075, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088033} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.835900] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 849.836814] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed5abe8-7015-4867-9638-60387bb7aff8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.865422] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] d900df05-b65c-4a45-94d1-563afbf9c022/d900df05-b65c-4a45-94d1-563afbf9c022.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 849.870140] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a131821-f224-42bf-9ef5-5984f9baa3c0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.893830] env[69367]: DEBUG oslo_vmware.api [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Waiting for the task: (returnval){ [ 849.893830] env[69367]: value = "task-4234080" [ 849.893830] env[69367]: _type = "Task" [ 849.893830] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.907608] env[69367]: DEBUG oslo_vmware.api [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Task: {'id': task-4234080, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.924499] env[69367]: DEBUG nova.compute.manager [req-9c90b3fc-9961-498c-873e-fc9838d878bc req-fefc3e2b-434a-4e4a-a18d-a9a6f194b1cc service nova] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Received event network-changed-783fd02a-aef2-4f21-aebf-723120f844c2 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 849.924693] env[69367]: DEBUG nova.compute.manager [req-9c90b3fc-9961-498c-873e-fc9838d878bc req-fefc3e2b-434a-4e4a-a18d-a9a6f194b1cc service nova] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Refreshing instance network info cache due to event network-changed-783fd02a-aef2-4f21-aebf-723120f844c2. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 849.924948] env[69367]: DEBUG oslo_concurrency.lockutils [req-9c90b3fc-9961-498c-873e-fc9838d878bc req-fefc3e2b-434a-4e4a-a18d-a9a6f194b1cc service nova] Acquiring lock "refresh_cache-05aae150-5d86-4210-ae7e-8c63e83cb907" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.925069] env[69367]: DEBUG oslo_concurrency.lockutils [req-9c90b3fc-9961-498c-873e-fc9838d878bc req-fefc3e2b-434a-4e4a-a18d-a9a6f194b1cc service nova] Acquired lock "refresh_cache-05aae150-5d86-4210-ae7e-8c63e83cb907" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 849.925227] env[69367]: DEBUG nova.network.neutron [req-9c90b3fc-9961-498c-873e-fc9838d878bc req-fefc3e2b-434a-4e4a-a18d-a9a6f194b1cc service nova] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Refreshing network info cache for port 783fd02a-aef2-4f21-aebf-723120f844c2 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 849.957267] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eef47cc-a598-41dd-a4b5-763012205557 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.969584] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa311502-8fee-4ea1-b735-37ca312084fd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.013114] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f222a5b8-2947-4690-a67b-f16a93916451 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.022446] env[69367]: INFO nova.compute.manager [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Took 36.13 seconds to build instance. [ 850.027018] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f24aca08-c7fe-4fe9-aa5b-69fdeee2aeff {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.043983] env[69367]: DEBUG nova.compute.provider_tree [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 850.119189] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234078, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.233351] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234073, 'name': PowerOnVM_Task} progress is 87%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.323662] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234079, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.370852] env[69367]: DEBUG oslo_concurrency.lockutils [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.407411] env[69367]: DEBUG oslo_vmware.api [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Task: {'id': task-4234080, 'name': ReconfigVM_Task, 'duration_secs': 0.478505} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.407828] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Reconfigured VM instance instance-0000004e to attach disk [datastore1] d900df05-b65c-4a45-94d1-563afbf9c022/d900df05-b65c-4a45-94d1-563afbf9c022.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 850.409708] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-86aec91f-bf8f-4d89-be47-3f0a97afe80d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.418419] env[69367]: DEBUG oslo_vmware.api [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Waiting for the task: (returnval){ [ 850.418419] env[69367]: value = "task-4234081" [ 850.418419] env[69367]: _type = "Task" [ 850.418419] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.432187] env[69367]: DEBUG oslo_vmware.api [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Task: {'id': task-4234081, 'name': Rename_Task} progress is 5%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.525312] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "46b6bc45-57f0-4850-9249-6bbb22b162c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.406s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 850.586129] env[69367]: ERROR nova.scheduler.client.report [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [req-8d9402a4-a6b7-478a-890c-74af339e3611] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-8d9402a4-a6b7-478a-890c-74af339e3611"}]} [ 850.586409] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.270s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 850.586869] env[69367]: ERROR nova.compute.manager [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 42114002-28e0-408a-862e-547680ed479f] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
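The same invalid DISK_GB max_unit of 0 resurfaces here, this time aborting ResourceTracker.instance_claim, so instance 42114002-28e0-408a-862e-547680ed479f fails to build rather than failing during delete. Note that the inventory refreshed from placement a moment earlier still reports max_unit 1 for DISK_GB, while the locally recomputed value keeps coming back as 0. A purely hypothetical guard, not Nova's actual behaviour, would be to sanity-check the computed inventory before sending it, so a single bad field cannot fail the whole provider sync:

    # Hypothetical mitigation sketch, not Nova code: clamp any max_unit below 1
    # (the schema minimum enforced by placement) before submitting the inventory,
    # so the anomaly is logged instead of failing the entire provider update.
    def sanitize_inventory(inventory):
        cleaned = {}
        for resource_class, record in inventory.items():
            if record.get("max_unit", 1) < 1:
                # e.g. DISK_GB max_unit computed as 0 by the driver; clamp to 1
                # (or skip the record) rather than send a value placement rejects.
                record = dict(record, max_unit=1)
            cleaned[resource_class] = record
        return cleaned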
[ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] Traceback (most recent call last): [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] yield [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] self.set_inventory_for_provider( [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-8d9402a4-a6b7-478a-890c-74af339e3611"}]} [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] During handling of the above exception, another exception occurred: [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] Traceback (most recent call last): [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] with self.rt.instance_claim(context, instance, node, allocs, [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] return f(*args, **kwargs) [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] self._update(elevated, cn) [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 
42114002-28e0-408a-862e-547680ed479f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] self._update_to_placement(context, compute_node, startup) [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] return attempt.get(self._wrap_exception) [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] six.reraise(self.value[0], self.value[1], self.value[2]) [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] raise value [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] self.reportclient.update_from_provider_tree( [ 850.586869] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 850.587791] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] with catch_all(pd.uuid): [ 850.587791] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 850.587791] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] self.gen.throw(typ, value, traceback) [ 850.587791] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 850.587791] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] raise exception.ResourceProviderSyncFailed() [ 850.587791] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 850.587791] env[69367]: ERROR nova.compute.manager [instance: 42114002-28e0-408a-862e-547680ed479f] [ 850.587791] env[69367]: DEBUG nova.compute.utils [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 42114002-28e0-408a-862e-547680ed479f] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 850.588846] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.319s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 850.591036] env[69367]: INFO nova.compute.claims [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] [instance: 097b74f5-19a1-41be-968d-19489ea9733c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 850.593987] env[69367]: DEBUG nova.compute.manager [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 42114002-28e0-408a-862e-547680ed479f] Build of instance 42114002-28e0-408a-862e-547680ed479f was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 850.594499] env[69367]: DEBUG nova.compute.manager [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 42114002-28e0-408a-862e-547680ed479f] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 850.594742] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Acquiring lock "refresh_cache-42114002-28e0-408a-862e-547680ed479f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.594894] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Acquired lock "refresh_cache-42114002-28e0-408a-862e-547680ed479f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 850.595064] env[69367]: DEBUG nova.network.neutron [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 42114002-28e0-408a-862e-547680ed479f] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 850.626158] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234078, 'name': CreateVM_Task, 'duration_secs': 0.603763} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.626158] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 850.626158] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.626309] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 850.626618] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 850.626928] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61ca302d-1fef-4a4e-be06-06806a10f6d1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.633818] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Waiting for the task: (returnval){ [ 850.633818] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]5283b0b4-4cac-f2e5-68e6-1ffeb46bdb75" [ 850.633818] env[69367]: _type = "Task" [ 850.633818] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.647269] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5283b0b4-4cac-f2e5-68e6-1ffeb46bdb75, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.735076] env[69367]: DEBUG oslo_vmware.api [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234073, 'name': PowerOnVM_Task, 'duration_secs': 1.15046} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.735395] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 850.735610] env[69367]: DEBUG nova.compute.manager [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 850.736414] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85584246-1dde-43fd-b005-316563d00be5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.823057] env[69367]: DEBUG oslo_vmware.api [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234079, 'name': PowerOnVM_Task, 'duration_secs': 0.674841} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.823317] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 850.823533] env[69367]: INFO nova.compute.manager [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Took 10.00 seconds to spawn the instance on the hypervisor. [ 850.823719] env[69367]: DEBUG nova.compute.manager [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 850.824535] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12b7637e-5de1-4ede-aecd-42f7693e51ae {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.888821] env[69367]: DEBUG nova.network.neutron [req-9c90b3fc-9961-498c-873e-fc9838d878bc req-fefc3e2b-434a-4e4a-a18d-a9a6f194b1cc service nova] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Updated VIF entry in instance network info cache for port 783fd02a-aef2-4f21-aebf-723120f844c2. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 850.889313] env[69367]: DEBUG nova.network.neutron [req-9c90b3fc-9961-498c-873e-fc9838d878bc req-fefc3e2b-434a-4e4a-a18d-a9a6f194b1cc service nova] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Updating instance_info_cache with network_info: [{"id": "783fd02a-aef2-4f21-aebf-723120f844c2", "address": "fa:16:3e:9f:8f:0d", "network": {"id": "28525544-bc2b-4838-b793-7438a0bc9915", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1953993669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1e0deb2e3174445993c4b644b0e6dab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1712475b-e1be-49e0-9a18-febd305c90ad", "external-id": "nsx-vlan-transportzone-531", "segmentation_id": 531, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap783fd02a-ae", "ovs_interfaceid": "783fd02a-aef2-4f21-aebf-723120f844c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.930196] env[69367]: DEBUG oslo_vmware.api [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Task: {'id': task-4234081, 'name': Rename_Task, 'duration_secs': 0.219464} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.930584] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 850.931075] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-95e516e1-2c33-42fc-bc37-268922262b39 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.939409] env[69367]: DEBUG oslo_vmware.api [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Waiting for the task: (returnval){ [ 850.939409] env[69367]: value = "task-4234082" [ 850.939409] env[69367]: _type = "Task" [ 850.939409] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.949535] env[69367]: DEBUG oslo_vmware.api [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Task: {'id': task-4234082, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.129294] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "27684757-2b5f-4c20-901d-70a9e19cf4a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 851.129646] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "27684757-2b5f-4c20-901d-70a9e19cf4a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 851.141201] env[69367]: DEBUG nova.network.neutron [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 42114002-28e0-408a-862e-547680ed479f] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 851.148475] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5283b0b4-4cac-f2e5-68e6-1ffeb46bdb75, 'name': SearchDatastore_Task, 'duration_secs': 0.013442} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.152858] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 851.153232] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 851.157025] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.157025] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 851.157025] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 851.157025] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dbbf2f9c-cb16-40e5-9c41-c08bb8c9e2fb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.165915] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 851.166148] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 851.166925] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb9ec0e4-7066-4950-82d0-8c15786746d3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.174078] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Waiting for the task: (returnval){ [ 851.174078] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52d9e82b-1755-2744-1997-eaf97fa3378d" [ 851.174078] env[69367]: _type = "Task" [ 851.174078] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.184811] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52d9e82b-1755-2744-1997-eaf97fa3378d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.257986] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 851.290859] env[69367]: DEBUG nova.network.neutron [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 42114002-28e0-408a-862e-547680ed479f] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.351133] env[69367]: INFO nova.compute.manager [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Took 35.09 seconds to build instance. [ 851.393201] env[69367]: DEBUG oslo_concurrency.lockutils [req-9c90b3fc-9961-498c-873e-fc9838d878bc req-fefc3e2b-434a-4e4a-a18d-a9a6f194b1cc service nova] Releasing lock "refresh_cache-05aae150-5d86-4210-ae7e-8c63e83cb907" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 851.452278] env[69367]: DEBUG oslo_vmware.api [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Task: {'id': task-4234082, 'name': PowerOnVM_Task} progress is 87%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.634195] env[69367]: DEBUG nova.compute.manager [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 851.637075] env[69367]: DEBUG nova.scheduler.client.report [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 851.661170] env[69367]: DEBUG nova.scheduler.client.report [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 851.661451] env[69367]: DEBUG nova.compute.provider_tree [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 851.676038] env[69367]: DEBUG nova.scheduler.client.report [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 851.690698] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52d9e82b-1755-2744-1997-eaf97fa3378d, 'name': SearchDatastore_Task, 'duration_secs': 0.015123} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.692359] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3711704e-8af5-4649-8058-199fc939cd37 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.699495] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Waiting for the task: (returnval){ [ 851.699495] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52be3de1-124b-9ce0-2020-72f1dc567ed7" [ 851.699495] env[69367]: _type = "Task" [ 851.699495] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.710277] env[69367]: DEBUG nova.scheduler.client.report [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 851.718352] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52be3de1-124b-9ce0-2020-72f1dc567ed7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.794613] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Releasing lock "refresh_cache-42114002-28e0-408a-862e-547680ed479f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 851.794873] env[69367]: DEBUG nova.compute.manager [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 851.795069] env[69367]: DEBUG nova.compute.manager [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 42114002-28e0-408a-862e-547680ed479f] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 851.795246] env[69367]: DEBUG nova.network.neutron [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 42114002-28e0-408a-862e-547680ed479f] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 851.817290] env[69367]: DEBUG nova.network.neutron [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 42114002-28e0-408a-862e-547680ed479f] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 851.851929] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f59be8dc-f5d1-4cae-9cae-d988f44f7986 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "54a1f586-481d-427e-ba0b-be90e5573bd3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.685s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.950075] env[69367]: DEBUG oslo_vmware.api [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Task: {'id': task-4234082, 'name': PowerOnVM_Task, 'duration_secs': 0.829455} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.950453] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 851.950706] env[69367]: INFO nova.compute.manager [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Took 7.90 seconds to spawn the instance on the hypervisor. 
[ 851.950840] env[69367]: DEBUG nova.compute.manager [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 851.954655] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f9a26b-5aec-476f-bf72-1b763bc51a6f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.165789] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 852.188364] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673f11f1-f847-4f37-87a2-51277115a7df {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.198148] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cb3fd53-cf4d-4af6-8c14-86f0889c9658 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.212787] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52be3de1-124b-9ce0-2020-72f1dc567ed7, 'name': SearchDatastore_Task, 'duration_secs': 0.017889} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.240490] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 852.241979] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 05aae150-5d86-4210-ae7e-8c63e83cb907/05aae150-5d86-4210-ae7e-8c63e83cb907.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 852.241979] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "4a46d003-f57e-4089-aa60-757a4246f071" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 852.241979] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "4a46d003-f57e-4089-aa60-757a4246f071" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 852.241979] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "4a46d003-f57e-4089-aa60-757a4246f071-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 852.242278] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "4a46d003-f57e-4089-aa60-757a4246f071-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 852.242414] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "4a46d003-f57e-4089-aa60-757a4246f071-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.244914] env[69367]: INFO nova.compute.manager [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d 
tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Terminating instance [ 852.246722] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-27a5ec1e-8c4e-4ded-83e3-3581cc31f397 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.250048] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f8daa29-e332-4425-a2e1-a2a9e2ae11e6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.263095] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f5d417d-9dc4-470f-b74b-5355b7043ddd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.268842] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Waiting for the task: (returnval){ [ 852.268842] env[69367]: value = "task-4234083" [ 852.268842] env[69367]: _type = "Task" [ 852.268842] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.285453] env[69367]: DEBUG nova.compute.provider_tree [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 852.295273] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Task: {'id': task-4234083, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.323243] env[69367]: DEBUG nova.network.neutron [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: 42114002-28e0-408a-862e-547680ed479f] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.397090] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Acquiring lock "c272b0ae-6313-46ab-977c-6de255e77675" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 852.397295] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Lock "c272b0ae-6313-46ab-977c-6de255e77675" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 852.397600] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Acquiring lock "c272b0ae-6313-46ab-977c-6de255e77675-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 852.397877] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Lock "c272b0ae-6313-46ab-977c-6de255e77675-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 852.398187] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Lock "c272b0ae-6313-46ab-977c-6de255e77675-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.401238] env[69367]: INFO nova.compute.manager [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Terminating instance [ 852.473691] env[69367]: INFO nova.compute.manager [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Took 30.63 seconds to build instance. 
[ 852.757913] env[69367]: DEBUG nova.compute.manager [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Start destroying the instance on the hypervisor. {{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 852.758111] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 852.759387] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-298830da-eaf0-4353-99ed-c245795b2aa1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.770448] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 852.775143] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1aa8df97-2373-4dda-b276-f5c591f0a9d3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.783184] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Task: {'id': task-4234083, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.784820] env[69367]: DEBUG oslo_vmware.api [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 852.784820] env[69367]: value = "task-4234084" [ 852.784820] env[69367]: _type = "Task" [ 852.784820] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.801097] env[69367]: DEBUG oslo_vmware.api [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234084, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.815116] env[69367]: ERROR nova.scheduler.client.report [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] [req-b81f8cb5-fa3d-47e5-8aa4-1a0b74390f63] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-b81f8cb5-fa3d-47e5-8aa4-1a0b74390f63"}]} [ 852.815538] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.227s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.816172] env[69367]: ERROR nova.compute.manager [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] [instance: 097b74f5-19a1-41be-968d-19489ea9733c] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] Traceback (most recent call last): [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] yield [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] self.set_inventory_for_provider( [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-b81f8cb5-fa3d-47e5-8aa4-1a0b74390f63"}]} [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] During handling of the above exception, another exception occurred: [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] Traceback (most recent call last): [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] with self.rt.instance_claim(context, instance, node, allocs, [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] return f(*args, **kwargs) [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] self._update(elevated, cn) [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 
097b74f5-19a1-41be-968d-19489ea9733c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] self._update_to_placement(context, compute_node, startup) [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] return attempt.get(self._wrap_exception) [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] six.reraise(self.value[0], self.value[1], self.value[2]) [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] raise value [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] self.reportclient.update_from_provider_tree( [ 852.816172] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 852.817289] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] with catch_all(pd.uuid): [ 852.817289] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 852.817289] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] self.gen.throw(typ, value, traceback) [ 852.817289] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 852.817289] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] raise exception.ResourceProviderSyncFailed() [ 852.817289] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 852.817289] env[69367]: ERROR nova.compute.manager [instance: 097b74f5-19a1-41be-968d-19489ea9733c] [ 852.817289] env[69367]: DEBUG nova.compute.utils [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] [instance: 097b74f5-19a1-41be-968d-19489ea9733c] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 852.818326] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.285s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 852.818564] env[69367]: DEBUG nova.objects.instance [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lazy-loading 'resources' on Instance uuid d2f8328d-fd05-4e63-9cbd-a6e3ec948964 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 852.820144] env[69367]: DEBUG nova.compute.manager [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] [instance: 097b74f5-19a1-41be-968d-19489ea9733c] Build of instance 097b74f5-19a1-41be-968d-19489ea9733c was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 852.820661] env[69367]: DEBUG nova.compute.manager [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] [instance: 097b74f5-19a1-41be-968d-19489ea9733c] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 852.820913] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] Acquiring lock "refresh_cache-097b74f5-19a1-41be-968d-19489ea9733c" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.821079] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] Acquired lock "refresh_cache-097b74f5-19a1-41be-968d-19489ea9733c" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 852.821255] env[69367]: DEBUG nova.network.neutron [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] [instance: 097b74f5-19a1-41be-968d-19489ea9733c] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 852.826160] env[69367]: INFO nova.compute.manager [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 
tempest-DeleteServersTestJSON-900161964-project-member] [instance: 42114002-28e0-408a-862e-547680ed479f] Took 1.03 seconds to deallocate network for instance. [ 852.906940] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Acquiring lock "refresh_cache-c272b0ae-6313-46ab-977c-6de255e77675" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.907233] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Acquired lock "refresh_cache-c272b0ae-6313-46ab-977c-6de255e77675" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 852.907295] env[69367]: DEBUG nova.network.neutron [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 852.975937] env[69367]: DEBUG oslo_concurrency.lockutils [None req-1596796d-0651-46e9-83da-c6c399eb51e5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Lock "d900df05-b65c-4a45-94d1-563afbf9c022" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.662s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 853.209685] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] Acquiring lock "b5fa1af4-0295-49ed-a101-7810e507bf64" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 853.210056] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] Lock "b5fa1af4-0295-49ed-a101-7810e507bf64" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 853.282049] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Task: {'id': task-4234083, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.745365} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.282432] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 05aae150-5d86-4210-ae7e-8c63e83cb907/05aae150-5d86-4210-ae7e-8c63e83cb907.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 853.282656] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 853.282977] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e1e21605-a446-4b2d-b0e2-764acbfb31b1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.295482] env[69367]: DEBUG oslo_vmware.api [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234084, 'name': PowerOffVM_Task, 'duration_secs': 0.387902} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.297041] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 853.297231] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 853.297561] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Waiting for the task: (returnval){ [ 853.297561] env[69367]: value = "task-4234085" [ 853.297561] env[69367]: _type = "Task" [ 853.297561] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.297807] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-82f01246-f113-41fb-aee0-8d13698c7e7b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.311540] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Task: {'id': task-4234085, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.345108] env[69367]: DEBUG nova.scheduler.client.report [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 853.350629] env[69367]: DEBUG nova.network.neutron [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] [instance: 097b74f5-19a1-41be-968d-19489ea9733c] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 853.364963] env[69367]: DEBUG nova.scheduler.client.report [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 853.365541] env[69367]: DEBUG nova.compute.provider_tree [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 853.386020] env[69367]: DEBUG nova.scheduler.client.report [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 853.389090] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 853.389443] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 853.389737] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Deleting the datastore file [datastore2] 4a46d003-f57e-4089-aa60-757a4246f071 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 853.390132] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6765bdbb-0954-445e-b4ab-b465258a4282 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.399230] env[69367]: DEBUG oslo_vmware.api [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 853.399230] env[69367]: value = "task-4234087" [ 853.399230] env[69367]: _type = "Task" [ 853.399230] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.411765] env[69367]: DEBUG oslo_vmware.api [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234087, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.412899] env[69367]: DEBUG nova.network.neutron [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] [instance: 097b74f5-19a1-41be-968d-19489ea9733c] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.414897] env[69367]: DEBUG nova.scheduler.client.report [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 853.438108] env[69367]: DEBUG nova.network.neutron [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 853.491058] env[69367]: DEBUG nova.network.neutron [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.609643] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Acquiring lock "2086bd49-6926-4466-9ad0-74f9dbc8b31a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 853.609887] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Lock "2086bd49-6926-4466-9ad0-74f9dbc8b31a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 853.712702] env[69367]: DEBUG nova.compute.manager [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 853.810844] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Task: {'id': task-4234085, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079626} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.813755] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 853.814784] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa9a011-676c-4e20-9e5f-35c81d8ab86d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.838826] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] 05aae150-5d86-4210-ae7e-8c63e83cb907/05aae150-5d86-4210-ae7e-8c63e83cb907.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 853.844140] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29c2984f-e39d-47b0-b383-dfbf8b9d83f6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.866051] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Waiting for the task: (returnval){ [ 853.866051] env[69367]: value = "task-4234088" [ 853.866051] env[69367]: _type = "Task" [ 853.866051] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.873657] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d40240b-7cf3-4aff-9583-dcda4af5a567 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.880578] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Task: {'id': task-4234088, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.881517] env[69367]: INFO nova.scheduler.client.report [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Deleted allocations for instance 42114002-28e0-408a-862e-547680ed479f [ 853.894353] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6f43fec-1c70-4513-9c24-c06e88eba1f0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.930184] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] Releasing lock "refresh_cache-097b74f5-19a1-41be-968d-19489ea9733c" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 853.930484] env[69367]: DEBUG nova.compute.manager [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 853.930719] env[69367]: DEBUG nova.compute.manager [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] [instance: 097b74f5-19a1-41be-968d-19489ea9733c] Skipping network deallocation for instance since networking was not requested. {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 853.937681] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-108ee7ca-b1a5-4b46-bd8c-1d297d43bbe8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.946995] env[69367]: DEBUG oslo_vmware.api [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234087, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.950538] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e04604-1a76-4863-a582-26d6b11ea0cf {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.967272] env[69367]: DEBUG nova.compute.provider_tree [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 853.993378] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Releasing lock "refresh_cache-c272b0ae-6313-46ab-977c-6de255e77675" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 853.993807] env[69367]: DEBUG nova.compute.manager [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Start destroying the instance on the hypervisor. {{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 853.994011] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 853.994933] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a03fcf76-2f1a-4891-a772-88af221f955e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.003514] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 854.003671] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca82244b-fa0d-447e-9ab7-41ef1c76bfa0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.012247] env[69367]: DEBUG oslo_vmware.api [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Waiting for the task: (returnval){ [ 854.012247] env[69367]: value = "task-4234089" [ 854.012247] env[69367]: _type = "Task" [ 854.012247] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.021212] env[69367]: DEBUG oslo_vmware.api [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234089, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.116422] env[69367]: DEBUG nova.compute.manager [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 854.234928] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.377616] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Task: {'id': task-4234088, 'name': ReconfigVM_Task, 'duration_secs': 0.499244} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.377948] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Reconfigured VM instance instance-0000004f to attach disk [datastore2] 05aae150-5d86-4210-ae7e-8c63e83cb907/05aae150-5d86-4210-ae7e-8c63e83cb907.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 854.378784] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9087d4e5-9392-44e7-8a90-09a32e368bf5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.387923] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Waiting for the task: (returnval){ [ 854.387923] env[69367]: value = "task-4234090" [ 854.387923] env[69367]: _type = "Task" [ 854.387923] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.392048] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b99aa412-e4c2-4ca1-99d0-611e38feac1e tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Lock "42114002-28e0-408a-862e-547680ed479f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.266s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.401730] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Task: {'id': task-4234090, 'name': Rename_Task} progress is 6%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.440195] env[69367]: DEBUG oslo_vmware.api [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234087, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.612464} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.440494] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 854.440739] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 854.441140] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 854.441140] env[69367]: INFO nova.compute.manager [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Took 1.68 seconds to destroy the instance on the hypervisor. [ 854.441439] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 854.444858] env[69367]: DEBUG nova.compute.manager [-] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 854.444978] env[69367]: DEBUG nova.network.neutron [-] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 854.494923] env[69367]: ERROR nova.scheduler.client.report [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [req-06edf124-89e9-45f0-a8c1-4cea2809d0a6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-06edf124-89e9-45f0-a8c1-4cea2809d0a6"}]} [ 854.495382] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.677s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.496245] env[69367]: ERROR nova.compute.manager [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Traceback (most recent call last): [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] yield [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] self.set_inventory_for_provider( [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-06edf124-89e9-45f0-a8c1-4cea2809d0a6"}]} [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] During handling of the above exception, another exception occurred: [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Traceback (most recent call last): [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] self._delete_instance(context, instance, bdms) [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] self._complete_deletion(context, instance) [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] self._update_resource_tracker(context, instance) [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: 
d2f8328d-fd05-4e63-9cbd-a6e3ec948964] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] self.rt.update_usage(context, instance, instance.node) [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] return f(*args, **kwargs) [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] self._update(context.elevated(), self.compute_nodes[nodename]) [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] self._update_to_placement(context, compute_node, startup) [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] return attempt.get(self._wrap_exception) [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] six.reraise(self.value[0], self.value[1], self.value[2]) [ 854.496245] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 854.497015] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] raise value [ 854.497015] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 854.497015] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 854.497015] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 854.497015] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] self.reportclient.update_from_provider_tree( [ 854.497015] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 854.497015] 
env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] with catch_all(pd.uuid): [ 854.497015] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 854.497015] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] self.gen.throw(typ, value, traceback) [ 854.497015] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 854.497015] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] raise exception.ResourceProviderSyncFailed() [ 854.497015] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 854.497015] env[69367]: ERROR nova.compute.manager [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] [ 854.500027] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 15.575s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.522296] env[69367]: DEBUG oslo_vmware.api [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234089, 'name': PowerOffVM_Task, 'duration_secs': 0.152742} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.522604] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 854.522871] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 854.523228] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3af44dad-f14f-4fa3-b2a1-db1033d80cb3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.552719] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 854.552797] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Deleting contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 854.553085] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Deleting the datastore file [datastore1] c272b0ae-6313-46ab-977c-6de255e77675 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 854.553474] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a4b03dbe-ae88-4f8d-83dd-0eb6820db811 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.563783] env[69367]: DEBUG oslo_vmware.api [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Waiting for the task: (returnval){ [ 854.563783] env[69367]: value = "task-4234092" [ 854.563783] env[69367]: _type = "Task" [ 854.563783] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.638118] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.753943] env[69367]: DEBUG nova.compute.manager [req-63591b40-bcd7-456d-a38d-f8a917fb4765 req-238dfa3a-11de-462c-9013-fc1977559056 service nova] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Received event network-vif-deleted-badee9f4-aebf-4455-81d3-ddbb3adb8072 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 854.754224] env[69367]: INFO nova.compute.manager [req-63591b40-bcd7-456d-a38d-f8a917fb4765 req-238dfa3a-11de-462c-9013-fc1977559056 service nova] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Neutron deleted interface badee9f4-aebf-4455-81d3-ddbb3adb8072; detaching it from the instance and deleting it from the info cache [ 854.754376] env[69367]: DEBUG nova.network.neutron [req-63591b40-bcd7-456d-a38d-f8a917fb4765 req-238dfa3a-11de-462c-9013-fc1977559056 service nova] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.902334] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Task: {'id': task-4234090, 'name': Rename_Task, 'duration_secs': 0.186797} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.902715] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 854.902964] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e57f8bae-0a26-4057-9a76-fd35f7c77218 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.912251] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Waiting for the task: (returnval){ [ 854.912251] env[69367]: value = "task-4234093" [ 854.912251] env[69367]: _type = "Task" [ 854.912251] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.921749] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Task: {'id': task-4234093, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.973106] env[69367]: INFO nova.scheduler.client.report [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] Deleted allocations for instance 097b74f5-19a1-41be-968d-19489ea9733c [ 855.003507] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.297s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.080093] env[69367]: DEBUG oslo_vmware.api [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Task: {'id': task-4234092, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.203528} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.080093] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 855.080607] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Deleted contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 855.080607] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 855.080793] env[69367]: INFO nova.compute.manager [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Took 1.09 seconds to destroy the instance on the hypervisor. [ 855.081351] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 855.081720] env[69367]: DEBUG nova.compute.manager [-] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 855.082198] env[69367]: DEBUG nova.network.neutron [-] [instance: c272b0ae-6313-46ab-977c-6de255e77675] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 855.102893] env[69367]: DEBUG nova.network.neutron [-] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 855.223022] env[69367]: DEBUG nova.network.neutron [-] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.259125] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2b79dd7a-4314-4fa0-8309-3f44d8cd2589 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.269258] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eccf456e-999d-4577-8c7d-ea92a4803570 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.305543] env[69367]: DEBUG nova.compute.manager [req-63591b40-bcd7-456d-a38d-f8a917fb4765 req-238dfa3a-11de-462c-9013-fc1977559056 service nova] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Detach interface failed, port_id=badee9f4-aebf-4455-81d3-ddbb3adb8072, reason: Instance 4a46d003-f57e-4089-aa60-757a4246f071 could not be found. {{(pid=69367) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11601}} [ 855.423084] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Task: {'id': task-4234093, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.462335] env[69367]: DEBUG oslo_vmware.rw_handles [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ac71ee-68b4-2443-2721-639862639e70/disk-0.vmdk. {{(pid=69367) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 855.463379] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a346d6b-2f88-48bc-a214-40711a97027b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.470031] env[69367]: DEBUG oslo_vmware.rw_handles [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ac71ee-68b4-2443-2721-639862639e70/disk-0.vmdk is in state: ready. {{(pid=69367) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 855.470214] env[69367]: ERROR oslo_vmware.rw_handles [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ac71ee-68b4-2443-2721-639862639e70/disk-0.vmdk due to incomplete transfer. 
[ 855.470449] env[69367]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-1940bf32-7f4d-4238-be4e-0826f883d906 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.478276] env[69367]: DEBUG oslo_vmware.rw_handles [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ac71ee-68b4-2443-2721-639862639e70/disk-0.vmdk. {{(pid=69367) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 855.478515] env[69367]: DEBUG nova.virt.vmwareapi.images [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Uploaded image 57f6198a-1bce-4321-9d01-6d55899490ca to the Glance image server {{(pid=69367) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 855.480592] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Destroying the VM {{(pid=69367) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 855.481035] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5585bf21-ce60-40c9-b64f-d5c29c2b3a79 tempest-ServersListShow2100Test-1441418592 tempest-ServersListShow2100Test-1441418592-project-member] Lock "097b74f5-19a1-41be-968d-19489ea9733c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.937s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.481275] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f0a82339-ed8c-4a94-8fee-bbd3aac875ee {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.488404] env[69367]: DEBUG oslo_vmware.api [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Waiting for the task: (returnval){ [ 855.488404] env[69367]: value = "task-4234094" [ 855.488404] env[69367]: _type = "Task" [ 855.488404] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.499279] env[69367]: DEBUG oslo_vmware.api [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': task-4234094, 'name': Destroy_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.606197] env[69367]: DEBUG nova.network.neutron [-] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.727036] env[69367]: INFO nova.compute.manager [-] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Took 1.28 seconds to deallocate network for instance. 
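The 'Waiting for the task: (returnval){ value = "task-4234094" ... } to complete' block and the recurring "_poll_task ... progress is N%." lines reflect a poll-until-terminal loop around a vCenter task. A self-contained sketch of that pattern follows; TaskInfo and get_task_info are illustrative stand-ins, not the oslo.vmware wait_for_task API.

import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str        # 'running', 'success' or 'error'
    progress: int     # 0-100, as echoed in the progress lines above
    error: str = ""

def wait_for_task(get_task_info, task_ref, interval=0.5, timeout=300):
    """Poll a task until it reaches a terminal state, logging progress."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_ref)
        print(f"Task: {task_ref} progress is {info.progress}%.")
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {task_ref} failed: {info.error}")
        time.sleep(interval)
    raise TimeoutError(f"Task {task_ref} did not complete within {timeout}s")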
[ 855.924417] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Task: {'id': task-4234093, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.000071] env[69367]: DEBUG oslo_vmware.api [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': task-4234094, 'name': Destroy_Task, 'duration_secs': 0.348775} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.000437] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Destroyed the VM [ 856.000724] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Deleting Snapshot of the VM instance {{(pid=69367) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 856.001016] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-59ffd316-470d-4517-b9a1-3522ffca4d4f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.009395] env[69367]: DEBUG oslo_vmware.api [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Waiting for the task: (returnval){ [ 856.009395] env[69367]: value = "task-4234095" [ 856.009395] env[69367]: _type = "Task" [ 856.009395] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.019742] env[69367]: DEBUG oslo_vmware.api [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': task-4234095, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.103260] env[69367]: DEBUG oslo_concurrency.lockutils [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Acquiring lock "f311f965-e846-4519-8375-ffd831e6afc7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 856.103576] env[69367]: DEBUG oslo_concurrency.lockutils [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Lock "f311f965-e846-4519-8375-ffd831e6afc7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 856.109247] env[69367]: INFO nova.compute.manager [-] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Took 1.03 seconds to deallocate network for instance. 
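The 'Acquiring lock ... / acquired ... waited 0.000s' pairs above, and the matching 'released ... held N s' lines elsewhere in this run, are the bookkeeping around named locks guarding build, terminate and resource-tracker operations. A small sketch of that acquire/wait/hold accounting using a plain threading.Lock; it only mimics the log format and is not the oslo_concurrency.lockutils code.

import contextlib
import threading
import time

_locks = {}   # name -> threading.Lock, shared across callers

@contextlib.contextmanager
def named_lock(name, caller):
    lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{caller}"')
    t0 = time.monotonic()
    lock.acquire()
    t1 = time.monotonic()
    print(f'Lock "{name}" acquired by "{caller}" :: waited {t1 - t0:.3f}s')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{caller}" :: held {time.monotonic() - t1:.3f}s')

# Usage mirroring the build lock seen above (UUID taken from the log):
# with named_lock("f311f965-e846-4519-8375-ffd831e6afc7", "_locked_do_build_and_run_instance"):
#     ...  # build and run the instance while holding the per-instance lock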
[ 856.234566] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 856.425379] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Task: {'id': task-4234093, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.521554] env[69367]: DEBUG oslo_vmware.api [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': task-4234095, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.534222] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 856.605988] env[69367]: DEBUG nova.compute.manager [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: f311f965-e846-4519-8375-ffd831e6afc7] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 856.615773] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 856.925245] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Task: {'id': task-4234093, 'name': PowerOnVM_Task} progress is 86%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.019675] env[69367]: DEBUG oslo_vmware.api [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': task-4234095, 'name': RemoveSnapshot_Task, 'duration_secs': 0.706201} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.019941] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Deleted Snapshot of the VM instance {{(pid=69367) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 857.020234] env[69367]: INFO nova.compute.manager [None req-54e245f1-820f-4615-9485-964f0c04f9a5 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Took 16.90 seconds to snapshot the instance on the hypervisor. [ 857.130828] env[69367]: DEBUG oslo_concurrency.lockutils [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 857.201198] env[69367]: DEBUG oslo_concurrency.lockutils [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquiring lock "4f53c9fd-4c1a-4ac0-8116-41e54be9de18" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 857.201449] env[69367]: DEBUG oslo_concurrency.lockutils [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "4f53c9fd-4c1a-4ac0-8116-41e54be9de18" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 857.391340] env[69367]: DEBUG oslo_concurrency.lockutils [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquiring lock "9aec881e-8381-4626-b527-3df7e0671d8f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 857.391562] env[69367]: DEBUG oslo_concurrency.lockutils [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "9aec881e-8381-4626-b527-3df7e0671d8f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 857.426544] env[69367]: DEBUG oslo_vmware.api [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Task: {'id': task-4234093, 'name': PowerOnVM_Task, 'duration_secs': 2.477137} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.426801] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 857.427012] env[69367]: INFO nova.compute.manager [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Took 10.52 seconds to spawn the instance on the hypervisor. [ 857.427224] env[69367]: DEBUG nova.compute.manager [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 857.428013] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fda55b51-3e3c-40d8-af2d-e10f5d9a3061 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.703651] env[69367]: DEBUG nova.compute.manager [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 857.894129] env[69367]: DEBUG nova.compute.manager [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 857.947176] env[69367]: INFO nova.compute.manager [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Took 35.02 seconds to build instance. 
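The 'Checking state' step right after the power-on above reads the VM's runtime power state back from vCenter and translates it into Nova's numeric power states. A rough sketch of that translation, assuming the standard vSphere runtime.powerState strings; fetch_power_state is a hypothetical helper, not the driver's actual call.

# Numeric values as defined in nova.compute.power_state.
RUNNING, SHUTDOWN, SUSPENDED, NOSTATE = 1, 4, 7, 0

VSPHERE_TO_NOVA = {
    "poweredOn": RUNNING,
    "poweredOff": SHUTDOWN,
    "suspended": SUSPENDED,
}

def get_power_state(fetch_power_state, vm_ref):
    """Read runtime.powerState for vm_ref and map it to a Nova power state."""
    raw = fetch_power_state(vm_ref)   # e.g. via a PropertyCollector.RetrievePropertiesEx call
    return VSPHERE_TO_NOVA.get(raw, NOSTATE)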
[ 858.361603] env[69367]: DEBUG oslo_concurrency.lockutils [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.415177] env[69367]: DEBUG oslo_concurrency.lockutils [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.449550] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a9fc9f3e-763b-488a-b9c5-bd879a01937c tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Lock "05aae150-5d86-4210-ae7e-8c63e83cb907" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.111s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 858.578601] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance a358ce6d-9826-4ddb-8c2f-51bac8db59d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 858.578771] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 4e346ed1-36e9-421d-975f-e8bb6f05c0a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 858.578944] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance e1c7d100-4ad7-4871-970f-bb7562bfc6fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 858.579054] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 858.579178] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance ab365570-ac29-4094-be4c-d49563a465c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 858.579339] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance c17525ee-d038-4c81-932b-ed74a6de6cb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 858.579488] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 92c27615-d377-492f-a9db-ff45b2e71537 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 858.579850] env[69367]: WARNING nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance d2f8328d-fd05-4e63-9cbd-a6e3ec948964 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 858.580018] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 858.580170] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 011ab7de-98a7-41fc-9e05-e71965c73c09 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 858.580296] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance f8c07fa1-d27c-4d0f-847b-481477cd04bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 858.580413] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 837b4093-308b-440b-940d-fc0227a5c590 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 858.580586] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 7f937d89-684b-44f5-9f30-783aeafe99d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 858.580695] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance ab9d8e3e-65c5-4ac9-920f-3042b8cf2054 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 858.580726] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 557dc011-44a1-4240-9596-d055d57e176f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 858.580853] env[69367]: WARNING nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 48470f96-56d2-4ca2-8078-c5ff4f6db71b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 858.580967] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 95efcff3-a81b-49fb-b85a-dae060c023b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 858.581092] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance c272b0ae-6313-46ab-977c-6de255e77675 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 858.581206] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 4a46d003-f57e-4089-aa60-757a4246f071 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 858.581315] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 46b6bc45-57f0-4850-9249-6bbb22b162c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 858.581422] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 54a1f586-481d-427e-ba0b-be90e5573bd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 858.581529] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance d900df05-b65c-4a45-94d1-563afbf9c022 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 858.581636] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 05aae150-5d86-4210-ae7e-8c63e83cb907 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 858.647389] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f82abe69-1db3-4cf3-a5d6-299bbe83f2f8 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Acquiring lock "interface-05aae150-5d86-4210-ae7e-8c63e83cb907-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.647842] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f82abe69-1db3-4cf3-a5d6-299bbe83f2f8 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Lock "interface-05aae150-5d86-4210-ae7e-8c63e83cb907-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 858.648330] env[69367]: DEBUG nova.objects.instance [None req-f82abe69-1db3-4cf3-a5d6-299bbe83f2f8 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Lazy-loading 'flavor' on Instance uuid 05aae150-5d86-4210-ae7e-8c63e83cb907 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 859.087090] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 859.154368] env[69367]: DEBUG nova.objects.instance [None req-f82abe69-1db3-4cf3-a5d6-299bbe83f2f8 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Lazy-loading 'pci_requests' on Instance uuid 05aae150-5d86-4210-ae7e-8c63e83cb907 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 859.589850] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 2b2a47ca-47d7-43bb-80cd-801e08f327ec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 859.658336] env[69367]: DEBUG nova.objects.base [None req-f82abe69-1db3-4cf3-a5d6-299bbe83f2f8 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Object Instance<05aae150-5d86-4210-ae7e-8c63e83cb907> lazy-loaded attributes: flavor,pci_requests {{(pid=69367) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 859.658551] env[69367]: DEBUG nova.network.neutron [None req-f82abe69-1db3-4cf3-a5d6-299bbe83f2f8 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 859.736416] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f82abe69-1db3-4cf3-a5d6-299bbe83f2f8 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Lock "interface-05aae150-5d86-4210-ae7e-8c63e83cb907-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.089s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.093719] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 860.597048] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 27684757-2b5f-4c20-901d-70a9e19cf4a5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 861.099910] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance b5fa1af4-0295-49ed-a101-7810e507bf64 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 861.603318] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 2086bd49-6926-4466-9ad0-74f9dbc8b31a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 861.718914] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Acquiring lock "05aae150-5d86-4210-ae7e-8c63e83cb907" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.718914] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Lock "05aae150-5d86-4210-ae7e-8c63e83cb907" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.718914] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Acquiring lock "05aae150-5d86-4210-ae7e-8c63e83cb907-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.718914] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Lock "05aae150-5d86-4210-ae7e-8c63e83cb907-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.718914] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Lock "05aae150-5d86-4210-ae7e-8c63e83cb907-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.721340] env[69367]: INFO nova.compute.manager [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Terminating instance [ 862.106952] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance f311f965-e846-4519-8375-ffd831e6afc7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 862.229050] env[69367]: DEBUG nova.compute.manager [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Start destroying the instance on the hypervisor. 
{{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 862.229252] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 862.230205] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fb1de56-458c-4bf6-ae33-d2683f13b748 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.238406] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 862.238650] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5ea5f851-0a31-4862-98e9-873cfa6f0f32 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.246271] env[69367]: DEBUG oslo_vmware.api [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Waiting for the task: (returnval){ [ 862.246271] env[69367]: value = "task-4234096" [ 862.246271] env[69367]: _type = "Task" [ 862.246271] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.256346] env[69367]: DEBUG oslo_vmware.api [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Task: {'id': task-4234096, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.611073] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 4f53c9fd-4c1a-4ac0-8116-41e54be9de18 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 862.756596] env[69367]: DEBUG oslo_vmware.api [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Task: {'id': task-4234096, 'name': PowerOffVM_Task, 'duration_secs': 0.222976} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.756907] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 862.757110] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 862.757393] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d485c8f9-0475-4aff-9e78-147ffd4bd394 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.824452] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 862.824804] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 862.825101] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Deleting the datastore file [datastore2] 05aae150-5d86-4210-ae7e-8c63e83cb907 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 862.825401] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f337bb2-e13d-49ee-8e44-ff6823713ec5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.833053] env[69367]: DEBUG oslo_vmware.api [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Waiting for the task: (returnval){ [ 862.833053] env[69367]: value = "task-4234098" [ 862.833053] env[69367]: _type = "Task" [ 862.833053] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.840826] env[69367]: DEBUG oslo_vmware.api [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Task: {'id': task-4234098, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.113837] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 9aec881e-8381-4626-b527-3df7e0671d8f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 863.114184] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Total usable vcpus: 48, total allocated vcpus: 21 {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 863.114316] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4544MB phys_disk=200GB used_disk=21GB total_vcpus=48 used_vcpus=21 pci_stats=[] {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 863.132438] env[69367]: DEBUG nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 863.148437] env[69367]: DEBUG nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 863.148739] env[69367]: DEBUG nova.compute.provider_tree [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 863.162294] env[69367]: DEBUG nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 863.179801] env[69367]: DEBUG nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: 
HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 863.342439] env[69367]: DEBUG oslo_vmware.api [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Task: {'id': task-4234098, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152628} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.344837] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 863.345046] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 863.345231] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 863.345408] env[69367]: INFO nova.compute.manager [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Took 1.12 seconds to destroy the instance on the hypervisor. [ 863.345644] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 863.348023] env[69367]: DEBUG nova.compute.manager [-] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 863.348023] env[69367]: DEBUG nova.network.neutron [-] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 863.596603] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5440699e-cb67-495e-9c7e-4b65250b9609 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.605309] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-635ef298-9a90-49bc-9810-04a3021e8bd9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.638867] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb7e724-3956-46fe-8742-62ca8149cc62 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.649232] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263ea024-6a0e-40b0-a5be-2009078c3d13 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.663579] env[69367]: DEBUG nova.compute.provider_tree [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 863.695032] env[69367]: DEBUG nova.compute.manager [req-25f1e8b2-bef0-401a-beab-714da7bf91dd req-68261396-e7de-4f77-ad7b-05840c8e16c2 service nova] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Received event network-vif-deleted-783fd02a-aef2-4f21-aebf-723120f844c2 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 863.695032] env[69367]: INFO nova.compute.manager [req-25f1e8b2-bef0-401a-beab-714da7bf91dd req-68261396-e7de-4f77-ad7b-05840c8e16c2 service nova] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Neutron deleted interface 783fd02a-aef2-4f21-aebf-723120f844c2; detaching it from the instance and deleting it from the info cache [ 863.695032] env[69367]: DEBUG nova.network.neutron [req-25f1e8b2-bef0-401a-beab-714da7bf91dd req-68261396-e7de-4f77-ad7b-05840c8e16c2 service nova] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.111217] env[69367]: DEBUG nova.network.neutron [-] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 864.185208] env[69367]: ERROR nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] [req-79025fc7-bad9-46ab-8807-72b638e483f1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-79025fc7-bad9-46ab-8807-72b638e483f1"}]} [ 864.185601] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.686s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 864.186176] env[69367]: ERROR nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Error updating resources for node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28.: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
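The 400 from placement above is a JSON-schema rejection: the compute node tried to PUT a DISK_GB inventory with max_unit = 0, while the schema quoted in the error requires max_unit to be an integer of at least 1, so the update fails and is surfaced as ResourceProviderSyncFailed. A short reproduction of just that check with the jsonschema library, using only the schema fragment quoted in the error (the real placement schema is larger):

import jsonschema

MAX_UNIT_SCHEMA = {
    "type": "object",
    "properties": {
        "max_unit": {"type": "integer", "maximum": 2147483647, "minimum": 1},
    },
}

disk_gb_inventory = {
    "total": 400, "reserved": 0, "min_unit": 1,
    "max_unit": 0,            # the offending value sent by the compute node
    "step_size": 1, "allocation_ratio": 1.0,
}

try:
    jsonschema.validate(disk_gb_inventory, MAX_UNIT_SCHEMA)
except jsonschema.ValidationError as exc:
    print(exc.message)        # "0 is less than the minimum of 1"

Since max_unit is part of the inventory the compute node itself reports, a 0 here means the node's current resource view offers no usable DISK_GB unit at that moment; the inventory refreshed from placement a few seconds earlier still shows max_unit 1 for DISK_GB on this provider.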
[ 864.186176] env[69367]: ERROR nova.compute.manager Traceback (most recent call last): [ 864.186176] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 864.186176] env[69367]: ERROR nova.compute.manager yield [ 864.186176] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 864.186176] env[69367]: ERROR nova.compute.manager self.set_inventory_for_provider( [ 864.186176] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 864.186176] env[69367]: ERROR nova.compute.manager raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 864.186176] env[69367]: ERROR nova.compute.manager nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-79025fc7-bad9-46ab-8807-72b638e483f1"}]} [ 864.186176] env[69367]: ERROR nova.compute.manager [ 864.186176] env[69367]: ERROR nova.compute.manager During handling of the above exception, another exception occurred: [ 864.186176] env[69367]: ERROR nova.compute.manager [ 864.186176] env[69367]: ERROR nova.compute.manager Traceback (most recent call last): [ 864.186176] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 11219, in _update_available_resource_for_node [ 864.186176] env[69367]: ERROR nova.compute.manager self.rt.update_available_resource(context, nodename, [ 864.186176] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 965, in update_available_resource [ 864.186176] env[69367]: ERROR nova.compute.manager self._update_available_resource(context, resources, startup=startup) [ 864.186176] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 864.186176] env[69367]: ERROR nova.compute.manager return f(*args, **kwargs) [ 864.186176] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1096, in _update_available_resource [ 864.186176] env[69367]: ERROR nova.compute.manager self._update(context, cn, startup=startup) [ 864.186176] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 864.186176] env[69367]: ERROR nova.compute.manager self._update_to_placement(context, compute_node, startup) [ 864.186176] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 864.186176] env[69367]: ERROR nova.compute.manager return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 864.186176] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 864.186176] env[69367]: 
ERROR nova.compute.manager return attempt.get(self._wrap_exception) [ 864.186176] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 864.186176] env[69367]: ERROR nova.compute.manager six.reraise(self.value[0], self.value[1], self.value[2]) [ 864.186176] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 864.186176] env[69367]: ERROR nova.compute.manager raise value [ 864.186176] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 864.186176] env[69367]: ERROR nova.compute.manager attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 864.186176] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 864.186176] env[69367]: ERROR nova.compute.manager self.reportclient.update_from_provider_tree( [ 864.186176] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 864.186176] env[69367]: ERROR nova.compute.manager with catch_all(pd.uuid): [ 864.186176] env[69367]: ERROR nova.compute.manager File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 864.186176] env[69367]: ERROR nova.compute.manager self.gen.throw(typ, value, traceback) [ 864.186176] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 864.186176] env[69367]: ERROR nova.compute.manager raise exception.ResourceProviderSyncFailed() [ 864.186176] env[69367]: ERROR nova.compute.manager nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 864.186176] env[69367]: ERROR nova.compute.manager [ 864.187455] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.351s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 864.187975] env[69367]: INFO nova.compute.claims [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 864.190670] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 864.190820] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Cleaning up deleted instances {{(pid=69367) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11864}} [ 864.198225] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5ec4f47c-c9cf-4970-9bcb-0e5231e92e5c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.209787] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93835ec1-6ac1-4aa6-8fde-71ccca8c5498 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.238665] env[69367]: DEBUG nova.compute.manager [req-25f1e8b2-bef0-401a-beab-714da7bf91dd req-68261396-e7de-4f77-ad7b-05840c8e16c2 service nova] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Detach interface failed, port_id=783fd02a-aef2-4f21-aebf-723120f844c2, reason: Instance 05aae150-5d86-4210-ae7e-8c63e83cb907 could not be found. {{(pid=69367) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11601}} [ 864.614393] env[69367]: INFO nova.compute.manager [-] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Took 1.27 seconds to deallocate network for instance. 
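Context for the failure pattern above and in the records that follow: every inventory update for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 in this section is rejected with HTTP 400 because the compute node reports DISK_GB max_unit = 0, while placement's inventory schema requires max_unit to be an integer between 1 and 2147483647 (the constraint quoted verbatim in the error detail). The inventory refreshed back from placement still carries DISK_GB max_unit = 1, so the resource tracker retries, recomputes max_unit = 0, and fails again, which is why each instance_claim below ends in ResourceProviderSyncFailed and the affected builds are re-scheduled. The following is a minimal sketch, not Nova or placement code, that reproduces the same validation failure locally; it assumes the jsonschema package is available, the schema fragment mirrors only the max_unit constraint quoted in the error detail, and the payload values are copied from the log.

# Minimal sketch (not Nova/placement code): reproduce the 400 validation failure
# seen above with the jsonschema package. The schema fragment mirrors only the
# max_unit constraint quoted in the error detail; the payload values are copied
# from the DISK_GB inventory the resource tracker keeps sending.
import jsonschema

INVENTORY_MAX_UNIT_SCHEMA = {
    "type": "object",
    "properties": {
        "inventories": {
            "type": "object",
            "patternProperties": {
                "^[A-Z0-9_]+$": {
                    "type": "object",
                    "properties": {
                        "max_unit": {
                            "type": "integer",
                            "minimum": 1,
                            "maximum": 2147483647,
                        },
                    },
                },
            },
        },
    },
}

payload = {
    "inventories": {
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1,
                    "max_unit": 0, "step_size": 1, "allocation_ratio": 1.0},
    },
}

try:
    jsonschema.validate(payload, INVENTORY_MAX_UNIT_SCHEMA)
except jsonschema.exceptions.ValidationError as exc:
    # Prints "0 is less than the minimum of 1", matching the detail in the
    # 400 responses logged above.
    print(exc.message)

Until the compute node reports a DISK_GB max_unit of at least 1, every _update_to_placement call will keep hitting this 400, so the ResourceProviderSyncFailed errors and build re-schedules recur for each subsequent claim recorded below.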
[ 864.698326] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] There are 8 instances to clean {{(pid=69367) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11873}} [ 864.698583] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] [instance: 788b843c-1496-4562-a761-44f3e1ce6da2] Instance has had 0 of 5 cleanup attempts {{(pid=69367) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11877}} [ 865.122524] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.206464] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] [instance: bdc0938b-60ef-463a-b3fd-1754f38a3b79] Instance has had 0 of 5 cleanup attempts {{(pid=69367) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11877}} [ 865.223286] env[69367]: DEBUG nova.scheduler.client.report [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 865.240534] env[69367]: DEBUG nova.scheduler.client.report [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 865.240769] env[69367]: DEBUG nova.compute.provider_tree [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 865.252608] env[69367]: DEBUG nova.scheduler.client.report [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 865.273412] env[69367]: 
DEBUG nova.scheduler.client.report [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 865.632845] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab9513ea-af23-40da-b291-2c11c00ba1b8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.640873] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29044d4d-df00-4312-936a-e3c24ab6bf1f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.670416] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea8fbfa-48e5-4100-94f9-003ffdfd05ba {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.678690] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beeb89fb-c907-4d11-a01a-0d5e823ab474 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.692890] env[69367]: DEBUG nova.compute.provider_tree [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 865.708218] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] [instance: fa4a5dbc-b885-4439-8520-0bfff38438b3] Instance has had 0 of 5 cleanup attempts {{(pid=69367) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11877}} [ 866.211093] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] [instance: ba4d981a-19f7-41ef-b7d1-a3f3830fe725] Instance has had 0 of 5 cleanup attempts {{(pid=69367) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11877}} [ 866.216689] env[69367]: ERROR nova.scheduler.client.report [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [req-2988f520-11a4-433d-a70a-51860e3b369a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource 
provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2988f520-11a4-433d-a70a-51860e3b369a"}]} [ 866.217155] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.031s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 866.217773] env[69367]: ERROR nova.compute.manager [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] Traceback (most recent call last): [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] yield [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] self.set_inventory_for_provider( [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2988f520-11a4-433d-a70a-51860e3b369a"}]} [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] 
[ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] During handling of the above exception, another exception occurred: [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] Traceback (most recent call last): [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] with self.rt.instance_claim(context, instance, node, allocs, [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] return f(*args, **kwargs) [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] self._update(elevated, cn) [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] self._update_to_placement(context, compute_node, startup) [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] return attempt.get(self._wrap_exception) [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] six.reraise(self.value[0], self.value[1], self.value[2]) [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] raise value [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] File 
"/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] self.reportclient.update_from_provider_tree( [ 866.217773] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 866.218812] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] with catch_all(pd.uuid): [ 866.218812] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 866.218812] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] self.gen.throw(typ, value, traceback) [ 866.218812] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 866.218812] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] raise exception.ResourceProviderSyncFailed() [ 866.218812] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 866.218812] env[69367]: ERROR nova.compute.manager [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] [ 866.218812] env[69367]: DEBUG nova.compute.utils [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 866.219615] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.235s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 866.221168] env[69367]: INFO nova.compute.claims [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 866.224962] env[69367]: DEBUG nova.compute.manager [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] Build of instance 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 866.225426] env[69367]: DEBUG nova.compute.manager [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 866.225678] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Acquiring lock "refresh_cache-4901e02d-c55c-4c27-8d5a-e48c7e83aaa9" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.225861] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Acquired lock "refresh_cache-4901e02d-c55c-4c27-8d5a-e48c7e83aaa9" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.226048] env[69367]: DEBUG nova.network.neutron [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 866.714376] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] [instance: 92bdb1b1-d8ab-46b2-9037-ee8fea4642ce] Instance has had 0 of 5 cleanup attempts {{(pid=69367) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11877}} [ 866.751628] env[69367]: DEBUG nova.network.neutron [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 866.844617] env[69367]: DEBUG nova.network.neutron [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.218364] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] [instance: 5c7b2127-e875-4222-8148-a2ea60631c25] Instance has had 0 of 5 cleanup attempts {{(pid=69367) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11877}} [ 867.249773] env[69367]: DEBUG nova.scheduler.client.report [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 867.265562] env[69367]: DEBUG nova.scheduler.client.report [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 867.265800] env[69367]: DEBUG nova.compute.provider_tree [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 867.277170] env[69367]: DEBUG nova.scheduler.client.report [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 867.294473] env[69367]: DEBUG nova.scheduler.client.report [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:843}} [ 867.349167] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Releasing lock "refresh_cache-4901e02d-c55c-4c27-8d5a-e48c7e83aaa9" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 867.349217] env[69367]: DEBUG nova.compute.manager [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 867.349371] env[69367]: DEBUG nova.compute.manager [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 867.349542] env[69367]: DEBUG nova.network.neutron [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 867.367477] env[69367]: DEBUG nova.network.neutron [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 867.659951] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbdfbb84-1232-4663-b94a-27b0b48c5dcd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.667753] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446278e5-15d5-4458-923a-a7cc27134d08 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.698156] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2fa581c-1301-416f-91aa-3f1048306abd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.705598] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58950427-2585-4de5-9400-0b72d983d11c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.718629] env[69367]: DEBUG nova.compute.provider_tree [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 867.723660] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] [instance: 1302cad6-55b7-4905-92c1-dfdd37042e30] Instance has had 0 of 5 cleanup attempts {{(pid=69367) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11877}} [ 867.871274] env[69367]: DEBUG nova.network.neutron [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.227542] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] [instance: 937c05e9-06f1-4a5f-9f8c-ac40c262ce4e] Instance has had 0 of 5 cleanup attempts {{(pid=69367) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11877}} [ 868.242310] env[69367]: ERROR nova.scheduler.client.report [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] [req-1cfedb40-3d15-4609-9dd0-539291dcd1ac] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 
19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-1cfedb40-3d15-4609-9dd0-539291dcd1ac"}]} [ 868.242714] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.023s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.243367] env[69367]: ERROR nova.compute.manager [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] Traceback (most recent call last): [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] yield [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] self.set_inventory_for_provider( [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-1cfedb40-3d15-4609-9dd0-539291dcd1ac"}]} [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] [ 868.243367] env[69367]: 
ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] During handling of the above exception, another exception occurred: [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] Traceback (most recent call last): [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] with self.rt.instance_claim(context, instance, node, allocs, [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] return f(*args, **kwargs) [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] self._update(elevated, cn) [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] self._update_to_placement(context, compute_node, startup) [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] return attempt.get(self._wrap_exception) [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] six.reraise(self.value[0], self.value[1], self.value[2]) [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] raise value [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] File 
"/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] self.reportclient.update_from_provider_tree( [ 868.243367] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 868.244213] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] with catch_all(pd.uuid): [ 868.244213] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 868.244213] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] self.gen.throw(typ, value, traceback) [ 868.244213] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 868.244213] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] raise exception.ResourceProviderSyncFailed() [ 868.244213] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 868.244213] env[69367]: ERROR nova.compute.manager [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] [ 868.244213] env[69367]: DEBUG nova.compute.utils [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 868.245471] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.159s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.247096] env[69367]: INFO nova.compute.claims [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 868.250147] env[69367]: DEBUG nova.compute.manager [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] Build of instance 2b2a47ca-47d7-43bb-80cd-801e08f327ec was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 868.250625] env[69367]: DEBUG nova.compute.manager [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 868.250877] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] Acquiring lock "refresh_cache-2b2a47ca-47d7-43bb-80cd-801e08f327ec" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.251542] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] Acquired lock "refresh_cache-2b2a47ca-47d7-43bb-80cd-801e08f327ec" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.251542] env[69367]: DEBUG nova.network.neutron [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 868.373562] env[69367]: INFO nova.compute.manager [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9] Took 1.02 seconds to deallocate network for instance. [ 868.731143] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.731287] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Cleaning up deleted instances with incomplete migration {{(pid=69367) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11902}} [ 868.770419] env[69367]: DEBUG nova.network.neutron [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 868.845884] env[69367]: DEBUG nova.network.neutron [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.235662] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 869.272999] env[69367]: DEBUG nova.scheduler.client.report [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 869.288269] env[69367]: DEBUG nova.scheduler.client.report [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 869.288515] env[69367]: DEBUG nova.compute.provider_tree [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 869.300048] env[69367]: DEBUG nova.scheduler.client.report [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 869.322962] env[69367]: DEBUG nova.scheduler.client.report [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 
869.350246] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] Releasing lock "refresh_cache-2b2a47ca-47d7-43bb-80cd-801e08f327ec" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.350491] env[69367]: DEBUG nova.compute.manager [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 869.350673] env[69367]: DEBUG nova.compute.manager [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 869.350843] env[69367]: DEBUG nova.network.neutron [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 869.372334] env[69367]: DEBUG nova.network.neutron [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 869.403031] env[69367]: INFO nova.scheduler.client.report [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Deleted allocations for instance 4901e02d-c55c-4c27-8d5a-e48c7e83aaa9 [ 869.711750] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0f87902-d06a-4ff1-9e73-efa789d2d3cf {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.719551] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e46ea1ef-c2e5-4654-9b99-6a9ee8c33c26 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.753064] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64a06256-2071-411c-b0f9-dc6f25c45d0f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.761227] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d572b7e-fb05-4ccf-9fe6-95a137bf6082 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.775423] env[69367]: DEBUG nova.compute.provider_tree [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 869.877068] env[69367]: DEBUG nova.network.neutron [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.913862] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ab1bccff-30fc-46dc-87b0-e255eed0c66e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Lock "4901e02d-c55c-4c27-8d5a-e48c7e83aaa9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.210s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.301344] env[69367]: ERROR nova.scheduler.client.report [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [req-c8608cac-c02e-416c-a641-a6630caa8c9a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 
1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-c8608cac-c02e-416c-a641-a6630caa8c9a"}]} [ 870.301751] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.056s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.302485] env[69367]: ERROR nova.compute.manager [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] Traceback (most recent call last): [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] yield [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] self.set_inventory_for_provider( [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-c8608cac-c02e-416c-a641-a6630caa8c9a"}]} [ 
870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] During handling of the above exception, another exception occurred: [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] Traceback (most recent call last): [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] with self.rt.instance_claim(context, instance, node, allocs, [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] return f(*args, **kwargs) [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] self._update(elevated, cn) [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] self._update_to_placement(context, compute_node, startup) [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] return attempt.get(self._wrap_exception) [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] six.reraise(self.value[0], self.value[1], self.value[2]) [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] raise value [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 870.302485] 
env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] self.reportclient.update_from_provider_tree( [ 870.302485] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 870.303668] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] with catch_all(pd.uuid): [ 870.303668] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 870.303668] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] self.gen.throw(typ, value, traceback) [ 870.303668] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 870.303668] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] raise exception.ResourceProviderSyncFailed() [ 870.303668] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 870.303668] env[69367]: ERROR nova.compute.manager [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] [ 870.303668] env[69367]: DEBUG nova.compute.utils [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 870.304416] env[69367]: DEBUG oslo_concurrency.lockutils [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.825s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 870.304608] env[69367]: DEBUG oslo_concurrency.lockutils [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.306517] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.653s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 870.309662] env[69367]: DEBUG nova.compute.manager [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] Build of instance f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 870.310183] env[69367]: DEBUG nova.compute.manager [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 870.310183] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "refresh_cache-f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.310331] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquired lock "refresh_cache-f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 870.310500] env[69367]: DEBUG nova.network.neutron [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 870.324427] env[69367]: DEBUG nova.scheduler.client.report [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Refreshing inventories for resource provider 
19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 870.327131] env[69367]: INFO nova.scheduler.client.report [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Deleted allocations for instance 48470f96-56d2-4ca2-8078-c5ff4f6db71b [ 870.338164] env[69367]: DEBUG nova.scheduler.client.report [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 870.338164] env[69367]: DEBUG nova.compute.provider_tree [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 870.354166] env[69367]: DEBUG nova.scheduler.client.report [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 870.378679] env[69367]: DEBUG nova.scheduler.client.report [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 870.381728] env[69367]: INFO nova.compute.manager [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] [instance: 2b2a47ca-47d7-43bb-80cd-801e08f327ec] Took 1.03 seconds to deallocate network for instance. 
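Annotation: the repeated 400s above all have the same cause. The compute node is reporting a DISK_GB inventory whose max_unit is 0, and the schema quoted in the error body requires max_unit to be an integer of at least 1, so placement rejects the PUT to /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories. set_inventory_for_provider then raises ResourceProviderUpdateFailed, the resource tracker surfaces it as ResourceProviderSyncFailed, and that is why the build of f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05 is re-scheduled. A minimal, hypothetical reproduction of the schema check, using the jsonschema package with the max_unit fragment lifted from the error detail (the real placement schema validates more fields), could look like this:

    # Hypothetical, simplified stand-in for the schema check placement applies to
    # PUT /resource_providers/{uuid}/inventories. The max_unit fragment is copied
    # from the 400 response above; the real schema covers more fields.
    from jsonschema import ValidationError, validate

    INVENTORY_SCHEMA = {
        "type": "object",
        "properties": {
            "inventories": {
                "type": "object",
                "patternProperties": {
                    "^[A-Z0-9_]+$": {
                        "type": "object",
                        "properties": {
                            "max_unit": {
                                "type": "integer",
                                "minimum": 1,
                                "maximum": 2147483647,
                            },
                        },
                    },
                },
            },
        },
    }

    # The DISK_GB record the resource tracker keeps trying to sync (values taken
    # from the failing request logged above).
    payload = {
        "inventories": {
            "DISK_GB": {
                "total": 400,
                "reserved": 0,
                "min_unit": 1,
                "max_unit": 0,  # rejected: the schema requires at least 1
                "step_size": 1,
                "allocation_ratio": 1.0,
            },
        },
    }

    try:
        validate(instance=payload, schema=INVENTORY_SCHEMA)
    except ValidationError as exc:
        print(exc.message)  # "0 is less than the minimum of 1"

Running the sketch prints the same "0 is less than the minimum of 1" message that appears in the 400 body above.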
[ 870.786571] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a33cba-4c2a-42f8-aabb-4f04aec67026 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.794814] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5d0d644-8b86-410b-81fa-0ae8ab711de2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.829734] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004bc6cb-771f-47ac-b99e-1be204d5d390 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.835236] env[69367]: DEBUG oslo_concurrency.lockutils [None req-06c99624-611a-4647-83e6-7287799f4e14 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "48470f96-56d2-4ca2-8078-c5ff4f6db71b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.598s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.840284] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c4af98f-f2f9-4f8f-a494-a0905f2d3a5a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.846020] env[69367]: DEBUG nova.network.neutron [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 870.856826] env[69367]: DEBUG nova.compute.provider_tree [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 870.928729] env[69367]: DEBUG nova.network.neutron [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.378154] env[69367]: ERROR nova.scheduler.client.report [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [req-c27f36e1-6ff8-4c98-a8fc-c64ad1a95f10] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-c27f36e1-6ff8-4c98-a8fc-c64ad1a95f10"}]}: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 871.378621] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.072s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 871.379650] env[69367]: WARNING nova.compute.manager [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Failed to revert task state for instance. 
Error: Failed to synchronize the placement service with resource provider information supplied by the compute host.: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 871.382024] env[69367]: DEBUG oslo_concurrency.lockutils [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.011s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server [None req-0bfe79aa-15cb-4630-92d3-2de33fe7c748 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server yield [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-67f6eb0b-5d25-4df6-97ff-f4f7729079fa"}]} [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server return 
self._do_dispatch(endpoint, method, ctxt, args) [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in 
terminate_instance [ 871.385387] env[69367]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server 
six.reraise(self.value[0], self.value[1], self.value[2]) [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server raise value [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 871.386604] env[69367]: ERROR oslo_messaging.rpc.server [ 871.404383] env[69367]: DEBUG nova.scheduler.client.report [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 871.414389] env[69367]: INFO nova.scheduler.client.report [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 tempest-ServerActionsTestOtherB-179286054-project-member] Deleted allocations for instance 2b2a47ca-47d7-43bb-80cd-801e08f327ec [ 871.422054] env[69367]: DEBUG nova.scheduler.client.report [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 871.422054] env[69367]: DEBUG nova.compute.provider_tree [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 871.431533] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Releasing lock "refresh_cache-f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 871.431955] env[69367]: DEBUG nova.compute.manager [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 871.432216] env[69367]: DEBUG nova.compute.manager [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 871.432429] env[69367]: DEBUG nova.network.neutron [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 871.435873] env[69367]: DEBUG nova.scheduler.client.report [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 871.453076] env[69367]: DEBUG nova.network.neutron [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 871.456028] env[69367]: DEBUG nova.scheduler.client.report [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 871.573947] env[69367]: DEBUG oslo_concurrency.lockutils [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Acquiring lock "67a3aaff-83ce-4c6c-af48-a3d4c52188cf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 871.574286] env[69367]: DEBUG oslo_concurrency.lockutils [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Lock "67a3aaff-83ce-4c6c-af48-a3d4c52188cf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 871.848359] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28dcd6bb-629c-4ff9-8307-437be9730328 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.856776] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-565ba999-d386-4a45-bc1a-4711b0201791 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.892117] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1547fd70-3118-4ab0-bf8a-b55d026ca7f2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.900463] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e538f4-d874-466d-9e28-c55fc17822e5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.914668] env[69367]: DEBUG nova.compute.provider_tree [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 871.926682] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0efedbe2-1e1f-4c5c-8e21-05ff0a267fd9 tempest-ServerActionsTestOtherB-179286054 
tempest-ServerActionsTestOtherB-179286054-project-member] Lock "2b2a47ca-47d7-43bb-80cd-801e08f327ec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.905s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 871.958447] env[69367]: DEBUG nova.network.neutron [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.078810] env[69367]: DEBUG nova.compute.manager [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 872.443021] env[69367]: ERROR nova.scheduler.client.report [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [req-689c2ebe-75bd-4a41-ab9d-518ca51305f5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-689c2ebe-75bd-4a41-ab9d-518ca51305f5"}]}: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 872.443021] env[69367]: DEBUG oslo_concurrency.lockutils [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.059s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 872.443021] env[69367]: WARNING nova.compute.manager [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] [instance: f8c07fa1-d27c-4d0f-847b-481477cd04bf] Failed to revert task state for instance. Error: Failed to synchronize the placement service with resource provider information supplied by the compute host.: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
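Annotation: note the oscillation in the entries above. After each failed PUT, _refresh_and_get_inventory reads the inventory back from placement and the provider tree briefly holds DISK_GB max_unit: 1, but the next local update from the compute driver writes max_unit: 0 again, so the following sync fails identically. The same ResourceProviderSyncFailed therefore surfaces from every path that ends in ResourceTracker._update: instance_claim during builds, update_usage during deletes, and the rescue and attach-interface tests' cleanup. Purely as a hypothetical illustration (not the VMware driver's actual code), a zero max_unit is the kind of value an integer floor produces when the largest usable free space is below one whole gigabyte:

    # Hypothetical illustration only (not the VMware driver's actual logic): an
    # integer floor over free bytes yields 0 GB when less than one GiB is usable,
    # and 0 is exactly the max_unit value placement rejects above.
    GiB = 1024 ** 3

    def disk_gb_max_unit(largest_free_bytes: int) -> int:
        return largest_free_bytes // GiB  # floor drops anything below a whole GiB

    print(disk_gb_max_unit(500 * 1024 ** 2))          # 0 -> placement answers 400
    print(max(1, disk_gb_max_unit(500 * 1024 ** 2)))  # 1 -> would pass the schema

Clamping such a derived value to at least 1, or omitting the DISK_GB record when no whole gigabyte is usable, would satisfy the schema; which of those is appropriate is a driver-level decision that this log alone cannot settle.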
[ 872.444766] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 21.188s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.445070] env[69367]: DEBUG nova.objects.instance [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69367) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server [None req-17171b96-ca57-47e5-925e-adb21efe7e04 tempest-ServerRescueTestJSON-1132992793 tempest-ServerRescueTestJSON-1132992793-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server yield [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-31a586ea-22c8-4155-ab8b-a077619c2aaf"}]} [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server return 
function(self, context, *args, **kwargs) [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 872.449509] env[69367]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 872.450692] env[69367]: ERROR 
oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server raise value [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 872.450692] env[69367]: ERROR oslo_messaging.rpc.server [ 872.462278] env[69367]: INFO nova.compute.manager [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05] Took 1.03 seconds to deallocate network for instance. 
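Annotation: the RPC traceback ending above shows the delete path hitting the same wall: terminate_instance → _delete_instance → _complete_deletion → _update_resource_tracker → update_usage → ResourceTracker._update → update_from_provider_tree, where set_inventory_for_provider raises ResourceProviderUpdateFailed carrying the 400 body and the catch_all context manager converts it into the generic ResourceProviderSyncFailed that oslo_messaging logs. A simplified sketch of that conversion pattern (not Nova's actual implementation) is:

    # Simplified sketch (not Nova's actual code) of the catch_all pattern the
    # traceback walks through: a context manager that converts a per-provider
    # update failure into the generic ResourceProviderSyncFailed callers see.
    import contextlib

    class ResourceProviderUpdateFailed(Exception):
        """Stand-in for nova.exception.ResourceProviderUpdateFailed."""

    class ResourceProviderSyncFailed(Exception):
        """Stand-in for nova.exception.ResourceProviderSyncFailed."""

    @contextlib.contextmanager
    def catch_all(provider_uuid):
        try:
            yield
        except ResourceProviderUpdateFailed:
            # The specific 400 body has already been logged; callers such as
            # instance_claim and update_usage only ever see the generic error.
            raise ResourceProviderSyncFailed()

    def set_inventory_for_provider():
        raise ResourceProviderUpdateFailed("placement returned 400")

    try:
        with catch_all("19ddf8be-7305-4f70-8366-52a9957232e6"):
            set_inventory_for_provider()
    except ResourceProviderSyncFailed as exc:
        print(type(exc).__name__)  # what ends up in the oslo_messaging ERROR above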
[ 872.605335] env[69367]: DEBUG oslo_concurrency.lockutils [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.457448] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0b0b5ce7-3f0e-41bb-894a-9ab6c54dbf71 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.458585] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.293s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.460267] env[69367]: INFO nova.compute.claims [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 873.501020] env[69367]: INFO nova.scheduler.client.report [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Deleted allocations for instance f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05 [ 874.011214] env[69367]: DEBUG oslo_concurrency.lockutils [None req-7d4a53ba-51e5-4ef7-a64b-94f8d6538c4f tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "f24baa3c-a91e-4bcc-8d8b-980e8d2a2f05" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.780s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.189513] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquiring lock "652e2e23-7927-46ce-b8af-fffdb6ac8a3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.189513] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "652e2e23-7927-46ce-b8af-fffdb6ac8a3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.487802] env[69367]: DEBUG nova.scheduler.client.report [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Refreshing inventories for resource provider 
19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 874.505062] env[69367]: DEBUG nova.scheduler.client.report [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 874.505355] env[69367]: DEBUG nova.compute.provider_tree [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 874.519034] env[69367]: DEBUG nova.scheduler.client.report [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 874.540347] env[69367]: DEBUG nova.scheduler.client.report [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 874.690977] env[69367]: DEBUG nova.compute.manager [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 874.988144] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3179099e-c42d-4cb5-8b4f-9da9c471dc81 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.996514] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13da0f63-4f49-43e1-9516-7045d33e0d8c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.031021] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02c7cb44-716d-418e-9dbf-36cbe2134dd7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.036899] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b994589-6d48-4d0a-8f02-dba9a50c7f67 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.052378] env[69367]: DEBUG nova.compute.provider_tree [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 875.214874] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 875.578890] env[69367]: ERROR nova.scheduler.client.report [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [req-e88cec12-800d-45f5-873d-e7bd611f9fa4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-e88cec12-800d-45f5-873d-e7bd611f9fa4"}]} [ 875.579307] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.121s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.579921] env[69367]: ERROR nova.compute.manager [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] Traceback (most recent call last): [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] yield [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] self.set_inventory_for_provider( [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-e88cec12-800d-45f5-873d-e7bd611f9fa4"}]} [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] [ 875.579921] env[69367]: ERROR nova.compute.manager 
[instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] During handling of the above exception, another exception occurred: [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] Traceback (most recent call last): [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] with self.rt.instance_claim(context, instance, node, allocs, [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] return f(*args, **kwargs) [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] self._update(elevated, cn) [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] self._update_to_placement(context, compute_node, startup) [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] return attempt.get(self._wrap_exception) [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] six.reraise(self.value[0], self.value[1], self.value[2]) [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] raise value [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 
1390, in _update_to_placement [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] self.reportclient.update_from_provider_tree( [ 875.579921] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 875.580763] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] with catch_all(pd.uuid): [ 875.580763] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 875.580763] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] self.gen.throw(typ, value, traceback) [ 875.580763] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 875.580763] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] raise exception.ResourceProviderSyncFailed() [ 875.580763] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 875.580763] env[69367]: ERROR nova.compute.manager [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] [ 875.580763] env[69367]: DEBUG nova.compute.utils [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 875.582722] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.348s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 875.587208] env[69367]: INFO nova.compute.claims [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 875.590397] env[69367]: DEBUG nova.compute.manager [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] Build of instance 27684757-2b5f-4c20-901d-70a9e19cf4a5 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 875.590880] env[69367]: DEBUG nova.compute.manager [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 875.591125] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "refresh_cache-27684757-2b5f-4c20-901d-70a9e19cf4a5" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.591282] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquired lock "refresh_cache-27684757-2b5f-4c20-901d-70a9e19cf4a5" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 875.591457] env[69367]: DEBUG nova.network.neutron [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 876.031995] env[69367]: DEBUG oslo_concurrency.lockutils [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.032257] env[69367]: DEBUG oslo_concurrency.lockutils [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 876.116349] env[69367]: DEBUG nova.network.neutron [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 876.252350] env[69367]: DEBUG nova.network.neutron [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.534427] env[69367]: DEBUG nova.compute.manager [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 876.621693] env[69367]: DEBUG nova.scheduler.client.report [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 876.637407] env[69367]: DEBUG nova.scheduler.client.report [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 876.638059] env[69367]: DEBUG nova.compute.provider_tree [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 876.651647] env[69367]: DEBUG nova.scheduler.client.report [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 876.674640] env[69367]: DEBUG nova.scheduler.client.report [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 876.755341] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Releasing lock "refresh_cache-27684757-2b5f-4c20-901d-70a9e19cf4a5" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.755901] env[69367]: DEBUG nova.compute.manager [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if 
VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 876.756329] env[69367]: DEBUG nova.compute.manager [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 876.756711] env[69367]: DEBUG nova.network.neutron [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 876.781984] env[69367]: DEBUG nova.network.neutron [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 877.052830] env[69367]: DEBUG oslo_concurrency.lockutils [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.071202] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-538e8633-8bda-4d07-8070-f0b50d75c33c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.079691] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7dab5e4-6abf-4e6e-b448-33100ff6afb1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.114230] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00052e4b-43fb-4cce-84eb-99796eabdbf3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.123617] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44d29e3c-b5ac-4c4b-a4e2-1a742b91cd15 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.138209] env[69367]: DEBUG nova.compute.provider_tree [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 877.283454] env[69367]: DEBUG nova.network.neutron [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d 
tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.661518] env[69367]: ERROR nova.scheduler.client.report [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] [req-621333bd-b365-48a2-a83b-2ebe2a9430f2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-621333bd-b365-48a2-a83b-2ebe2a9430f2"}]} [ 877.661902] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.079s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 877.662588] env[69367]: ERROR nova.compute.manager [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
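
Editor's note: every 400 above trips the same placement check, namely that an inventory's max_unit must be an integer of at least 1, while the compute host keeps reporting DISK_GB max_unit = 0. A minimal sketch that reproduces the rejection locally with the jsonschema library, using only the schema fragment quoted in the error detail (the real placement schema is larger; this is not the service's actual code path):

# Sketch: replay the check that placement's PUT .../inventories fails above.
# The schema fragment is copied from the 400 error detail; the rest is illustrative.
import jsonschema

MAX_UNIT_SCHEMA = {"type": "integer", "maximum": 2147483647, "minimum": 1}

# DISK_GB inventory the compute host tried to sync (values from the log above).
disk_gb = {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 0,
           "step_size": 1, "allocation_ratio": 1.0}

try:
    jsonschema.validate(disk_gb["max_unit"], MAX_UNIT_SCHEMA)
except jsonschema.exceptions.ValidationError as exc:
    print(exc.message)  # prints: 0 is less than the minimum of 1
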
[ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] Traceback (most recent call last): [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] yield [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] self.set_inventory_for_provider( [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-621333bd-b365-48a2-a83b-2ebe2a9430f2"}]} [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] During handling of the above exception, another exception occurred: [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] Traceback (most recent call last): [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] with self.rt.instance_claim(context, instance, node, allocs, [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] return f(*args, **kwargs) [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] self._update(elevated, cn) [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: 
b5fa1af4-0295-49ed-a101-7810e507bf64] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] self._update_to_placement(context, compute_node, startup) [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] return attempt.get(self._wrap_exception) [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] six.reraise(self.value[0], self.value[1], self.value[2]) [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] raise value [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] self.reportclient.update_from_provider_tree( [ 877.662588] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 877.664370] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] with catch_all(pd.uuid): [ 877.664370] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 877.664370] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] self.gen.throw(typ, value, traceback) [ 877.664370] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 877.664370] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] raise exception.ResourceProviderSyncFailed() [ 877.664370] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 877.664370] env[69367]: ERROR nova.compute.manager [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] [ 877.664370] env[69367]: DEBUG nova.compute.utils [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 877.665152] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.027s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.667025] env[69367]: INFO nova.compute.claims [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 877.672046] env[69367]: DEBUG nova.compute.manager [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] Build of instance b5fa1af4-0295-49ed-a101-7810e507bf64 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 877.672046] env[69367]: DEBUG nova.compute.manager [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 877.672046] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] Acquiring lock "refresh_cache-b5fa1af4-0295-49ed-a101-7810e507bf64" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.672046] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] Acquired lock "refresh_cache-b5fa1af4-0295-49ed-a101-7810e507bf64" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 877.672046] env[69367]: DEBUG nova.network.neutron [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 877.786316] env[69367]: INFO nova.compute.manager [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 27684757-2b5f-4c20-901d-70a9e19cf4a5] Took 1.03 seconds to deallocate network for 
instance. [ 878.190671] env[69367]: DEBUG nova.network.neutron [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 878.266174] env[69367]: DEBUG nova.network.neutron [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.693762] env[69367]: DEBUG nova.scheduler.client.report [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 878.711939] env[69367]: DEBUG nova.scheduler.client.report [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 878.712239] env[69367]: DEBUG nova.compute.provider_tree [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 878.725098] env[69367]: DEBUG nova.scheduler.client.report [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 878.744553] env[69367]: DEBUG nova.scheduler.client.report [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:843}} [ 878.769014] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] Releasing lock "refresh_cache-b5fa1af4-0295-49ed-a101-7810e507bf64" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 878.769264] env[69367]: DEBUG nova.compute.manager [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 878.769454] env[69367]: DEBUG nova.compute.manager [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] [instance: b5fa1af4-0295-49ed-a101-7810e507bf64] Skipping network deallocation for instance since networking was not requested. {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 878.817915] env[69367]: INFO nova.scheduler.client.report [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Deleted allocations for instance 27684757-2b5f-4c20-901d-70a9e19cf4a5 [ 879.135420] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7430c9-6989-43fe-8ace-379b3e2428d1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.144330] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb7021d-f2ca-4a57-91e1-9c91f3f79515 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.177866] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a828bfc-2f18-40db-9c23-7be6022463f7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.185937] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d31800-3a55-4482-afe0-fbae6d47b735 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.199595] env[69367]: DEBUG nova.compute.provider_tree [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 879.328551] env[69367]: DEBUG oslo_concurrency.lockutils [None req-6fa38a37-a07b-410a-8e73-9c3faa03718d tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock 
"27684757-2b5f-4c20-901d-70a9e19cf4a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.199s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.726723] env[69367]: ERROR nova.scheduler.client.report [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [req-e0506053-6068-4275-93ed-a9a34ff5e0b6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-e0506053-6068-4275-93ed-a9a34ff5e0b6"}]} [ 879.727171] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.062s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.727800] env[69367]: ERROR nova.compute.manager [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] Traceback (most recent call last): [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] yield [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] self.set_inventory_for_provider( [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-e0506053-6068-4275-93ed-a9a34ff5e0b6"}]} [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] During handling of the above exception, another exception occurred: [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] Traceback (most recent call last): [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] with self.rt.instance_claim(context, instance, node, allocs, [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] return f(*args, **kwargs) [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] self._update(elevated, cn) [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 
2086bd49-6926-4466-9ad0-74f9dbc8b31a] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] self._update_to_placement(context, compute_node, startup) [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] return attempt.get(self._wrap_exception) [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] six.reraise(self.value[0], self.value[1], self.value[2]) [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] raise value [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] self.reportclient.update_from_provider_tree( [ 879.727800] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 879.728802] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] with catch_all(pd.uuid): [ 879.728802] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 879.728802] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] self.gen.throw(typ, value, traceback) [ 879.728802] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 879.728802] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] raise exception.ResourceProviderSyncFailed() [ 879.728802] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
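
Editor's note: the refreshed inventory from placement shows DISK_GB max_unit = 1, but each ProviderTree update from the compute host flips it back to 0, so every instance_claim fails the same way. One plausible way a driver can end up reporting 0 (a hypothetical illustration only, not the VMware driver's actual code) is flooring the largest free datastore extent to whole GiB:

# Hypothetical illustration: if max_unit is derived by flooring the largest
# free extent to whole GiB, anything under 1 GiB collapses to 0, which
# placement then rejects because the schema minimum is 1.
GiB = 1024 ** 3

def disk_gb_max_unit(largest_free_bytes: int) -> int:
    return largest_free_bytes // GiB

print(disk_gb_max_unit(500 * 1024 ** 2))  # 0 -> rejected by placement
print(disk_gb_max_unit(2 * GiB))          # 2 -> acceptable
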
[ 879.728802] env[69367]: ERROR nova.compute.manager [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] [ 879.728802] env[69367]: DEBUG nova.compute.utils [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 879.729662] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.496s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 879.729880] env[69367]: DEBUG nova.objects.instance [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lazy-loading 'resources' on Instance uuid 4a46d003-f57e-4089-aa60-757a4246f071 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 879.731265] env[69367]: DEBUG nova.compute.manager [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] Build of instance 2086bd49-6926-4466-9ad0-74f9dbc8b31a was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 879.731726] env[69367]: DEBUG nova.compute.manager [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 879.731975] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Acquiring lock "refresh_cache-2086bd49-6926-4466-9ad0-74f9dbc8b31a" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.732209] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Acquired lock "refresh_cache-2086bd49-6926-4466-9ad0-74f9dbc8b31a" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 879.732409] env[69367]: DEBUG nova.network.neutron [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 879.827318] env[69367]: INFO nova.scheduler.client.report [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] 
Deleted allocations for instance b5fa1af4-0295-49ed-a101-7810e507bf64 [ 880.253921] env[69367]: DEBUG nova.scheduler.client.report [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 880.257453] env[69367]: DEBUG nova.network.neutron [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 880.268845] env[69367]: DEBUG nova.scheduler.client.report [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 880.269096] env[69367]: DEBUG nova.compute.provider_tree [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 880.280645] env[69367]: DEBUG nova.scheduler.client.report [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 880.298124] env[69367]: DEBUG nova.scheduler.client.report [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 880.336822] env[69367]: DEBUG oslo_concurrency.lockutils [None req-fbe9aafe-7468-4383-a4ac-3c19fa16b89a tempest-ServerShowV257Test-1573549382 tempest-ServerShowV257Test-1573549382-project-member] Lock 
"b5fa1af4-0295-49ed-a101-7810e507bf64" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.127s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.381735] env[69367]: DEBUG nova.network.neutron [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.712027] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3181685b-8bfa-42d1-8b5a-e14206c74655 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.720356] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddceed31-2a46-4b67-beab-78a7ec3a457b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.760223] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c2476a3-4a72-4c80-abd8-f771c190aaa3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.768505] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc34b6a2-fb6c-457d-88de-029f426b7e77 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.782900] env[69367]: DEBUG nova.compute.provider_tree [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 880.883869] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Releasing lock "refresh_cache-2086bd49-6926-4466-9ad0-74f9dbc8b31a" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 880.883869] env[69367]: DEBUG nova.compute.manager [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 880.884066] env[69367]: DEBUG nova.compute.manager [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 880.884351] env[69367]: DEBUG nova.network.neutron [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 880.901533] env[69367]: DEBUG nova.network.neutron [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 881.307537] env[69367]: ERROR nova.scheduler.client.report [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [req-822a58e9-b9b0-4a6e-a903-2c5a9b06e389] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-822a58e9-b9b0-4a6e-a903-2c5a9b06e389"}]} [ 881.308232] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.578s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.308969] env[69367]: ERROR nova.compute.manager [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
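The 400 quoted above is placement rejecting the compute node's inventory payload against its JSON schema: the DISK_GB entry carries max_unit = 0, while the schema requires an integer between 1 and 2147483647. A minimal sketch, assuming the jsonschema package and using only the schema fragment quoted in the error detail, reproduces the same validation message from the payload shown in the log; the traceback that follows shows how the failure then propagates through the resource tracker.

    import jsonschema

    # Fragment of the inventory schema as quoted in the error detail above;
    # the rest of placement's schema is omitted here.
    INVENTORY_SCHEMA = {
        "type": "object",
        "properties": {
            "inventories": {
                "type": "object",
                "patternProperties": {
                    "^[A-Z0-9_]+$": {
                        "type": "object",
                        "properties": {
                            "max_unit": {
                                "type": "integer",
                                "maximum": 2147483647,
                                "minimum": 1,
                            },
                        },
                    },
                },
            },
        },
    }

    # The DISK_GB inventory the compute host tried to PUT, per the log.
    payload = {
        "inventories": {
            "DISK_GB": {
                "total": 400, "reserved": 0, "min_unit": 1,
                "max_unit": 0, "step_size": 1, "allocation_ratio": 1.0,
            },
        },
    }

    try:
        jsonschema.validate(payload, INVENTORY_SCHEMA)
    except jsonschema.ValidationError as exc:
        print(exc.message)  # -> 0 is less than the minimum of 1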
[ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Traceback (most recent call last): [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] yield [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] self.set_inventory_for_provider( [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-822a58e9-b9b0-4a6e-a903-2c5a9b06e389"}]} [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] During handling of the above exception, another exception occurred: [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Traceback (most recent call last): [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] self._delete_instance(context, instance, bdms) [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] self._complete_deletion(context, instance) [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] self._update_resource_tracker(context, instance) [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 
4a46d003-f57e-4089-aa60-757a4246f071] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] self.rt.update_usage(context, instance, instance.node) [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] return f(*args, **kwargs) [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] self._update(context.elevated(), self.compute_nodes[nodename]) [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] self._update_to_placement(context, compute_node, startup) [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] return attempt.get(self._wrap_exception) [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] six.reraise(self.value[0], self.value[1], self.value[2]) [ 881.308969] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 881.310102] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] raise value [ 881.310102] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 881.310102] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 881.310102] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 881.310102] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] self.reportclient.update_from_provider_tree( [ 881.310102] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 881.310102] 
env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] with catch_all(pd.uuid): [ 881.310102] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 881.310102] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] self.gen.throw(typ, value, traceback) [ 881.310102] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 881.310102] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] raise exception.ResourceProviderSyncFailed() [ 881.310102] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 881.310102] env[69367]: ERROR nova.compute.manager [instance: 4a46d003-f57e-4089-aa60-757a4246f071] [ 881.311535] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.780s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.311768] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.311961] env[69367]: INFO nova.compute.manager [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Successfully reverted task state from None on failure for instance. [ 881.314570] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.699s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.314826] env[69367]: DEBUG nova.objects.instance [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Lazy-loading 'resources' on Instance uuid c272b0ae-6313-46ab-977c-6de255e77675 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server [None req-5fd10dcf-a94e-4432-b8c9-227cd572cc3a tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
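Each failure above is logged as two chained tracebacks because of the pattern visible in the report.py frames: set_inventory_for_provider raises a specific update error for the 400, and the catch_all context manager wrapping update_from_provider_tree re-raises it as the generic ResourceProviderSyncFailed while the first exception is still being handled. A small self-contained sketch of that shape, with hypothetical exception names standing in for Nova's:

    import contextlib

    class ProviderUpdateFailed(Exception):
        """Stand-in for the specific 'placement returned an error' exception."""

    class ProviderSyncFailed(Exception):
        """Stand-in for the generic 'could not sync with placement' exception."""

    @contextlib.contextmanager
    def catch_all(provider_uuid):
        try:
            yield
        except ProviderUpdateFailed:
            # Raised while the original error is still being handled, which is
            # what produces the "During handling of the above exception,
            # another exception occurred" chain seen in the log.
            raise ProviderSyncFailed()

    try:
        with catch_all("19ddf8be-7305-4f70-8366-52a9957232e6"):
            raise ProviderUpdateFailed("placement returned HTTP 400")
    except ProviderSyncFailed as exc:
        print(type(exc).__name__)  # -> ProviderSyncFailed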
[ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server yield [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-06edf124-89e9-45f0-a8c1-4cea2809d0a6"}]} [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 881.316922] env[69367]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 
881.319716] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server raise value [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server File 
"/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 881.319716] env[69367]: ERROR oslo_messaging.rpc.server [ 881.404079] env[69367]: DEBUG nova.network.neutron [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.819842] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "4a46d003-f57e-4089-aa60-757a4246f071" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.578s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.838528] env[69367]: DEBUG nova.scheduler.client.report [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 881.852547] env[69367]: DEBUG nova.scheduler.client.report [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 881.852547] env[69367]: DEBUG nova.compute.provider_tree [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Updating resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 generation from 101 to 102 during operation: update_inventory {{(pid=69367) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 881.852547] env[69367]: DEBUG nova.compute.provider_tree [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 881.864415] env[69367]: DEBUG nova.scheduler.client.report [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 881.881698] env[69367]: DEBUG nova.scheduler.client.report [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 881.913212] env[69367]: INFO nova.compute.manager [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: 2086bd49-6926-4466-9ad0-74f9dbc8b31a] Took 1.03 seconds to deallocate network for instance. [ 882.295030] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0332a79d-b2a5-4d0a-8533-c241761eec42 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.304476] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ac723d-07a3-4edc-aaab-390e4ab5516c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.337744] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-877cbfe6-74af-4578-9901-03f93da5597a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.345703] env[69367]: DEBUG oslo_concurrency.lockutils [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "42db60d9-e5f7-4925-8f6f-d3884687414a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.345798] env[69367]: DEBUG oslo_concurrency.lockutils [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "42db60d9-e5f7-4925-8f6f-d3884687414a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 882.350617] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11e44480-bcff-49f9-809c-63589a690ff4 {{(pid=69367) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.366883] env[69367]: DEBUG nova.compute.provider_tree [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 882.852031] env[69367]: DEBUG nova.compute.manager [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 882.891968] env[69367]: ERROR nova.scheduler.client.report [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [req-817b52a6-ba9b-4045-8df8-f8cd2f797b4d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-817b52a6-ba9b-4045-8df8-f8cd2f797b4d"}]} [ 882.892767] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.578s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 882.893412] env[69367]: ERROR nova.compute.manager [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
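The resource tracker keeps re-sending the same DISK_GB inventory with max_unit = 0 (each "Updating inventory in ProviderTree" entry above), so every update_usage call hits the same 400 and another instance is pushed into ERROR. A purely hypothetical illustration, not Nova or driver code, of how flooring free capacity to whole GiB can produce a zero max_unit and how clamping to the schema minimum would keep the payload valid:

    GIB = 1024 ** 3

    def disk_gb_max_unit(free_bytes: int) -> int:
        # Hypothetical derivation: largest single allocation in whole GiB.
        max_unit = free_bytes // GIB
        # Placement's schema requires 1 <= max_unit <= 2147483647, so a value
        # of 0 must never be reported (clamp it, or omit the resource class).
        return max(max_unit, 1)

    print(disk_gb_max_unit(512 * 1024 ** 2))  # 0.5 GiB free -> 1, not 0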
[ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] Traceback (most recent call last): [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] yield [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] self.set_inventory_for_provider( [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-817b52a6-ba9b-4045-8df8-f8cd2f797b4d"}]} [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] During handling of the above exception, another exception occurred: [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] Traceback (most recent call last): [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] self._delete_instance(context, instance, bdms) [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] self._complete_deletion(context, instance) [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] self._update_resource_tracker(context, instance) [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: 
c272b0ae-6313-46ab-977c-6de255e77675] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] self.rt.update_usage(context, instance, instance.node) [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] return f(*args, **kwargs) [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] self._update(context.elevated(), self.compute_nodes[nodename]) [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] self._update_to_placement(context, compute_node, startup) [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] return attempt.get(self._wrap_exception) [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] six.reraise(self.value[0], self.value[1], self.value[2]) [ 882.893412] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 882.894575] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] raise value [ 882.894575] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 882.894575] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 882.894575] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 882.894575] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] self.reportclient.update_from_provider_tree( [ 882.894575] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 882.894575] 
env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] with catch_all(pd.uuid): [ 882.894575] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 882.894575] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] self.gen.throw(typ, value, traceback) [ 882.894575] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 882.894575] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] raise exception.ResourceProviderSyncFailed() [ 882.894575] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 882.894575] env[69367]: ERROR nova.compute.manager [instance: c272b0ae-6313-46ab-977c-6de255e77675] [ 882.895697] env[69367]: DEBUG oslo_concurrency.lockutils [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.765s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 882.897604] env[69367]: INFO nova.compute.claims [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: f311f965-e846-4519-8375-ffd831e6afc7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 882.944719] env[69367]: INFO nova.scheduler.client.report [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Deleted allocations for instance 2086bd49-6926-4466-9ad0-74f9dbc8b31a [ 882.950338] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5e205b40-cfcf-4563-9c86-1720cbb819cc tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "4a46d003-f57e-4089-aa60-757a4246f071" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.950583] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5e205b40-cfcf-4563-9c86-1720cbb819cc tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "4a46d003-f57e-4089-aa60-757a4246f071" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 882.950784] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5e205b40-cfcf-4563-9c86-1720cbb819cc tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "4a46d003-f57e-4089-aa60-757a4246f071-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.950969] env[69367]: DEBUG 
oslo_concurrency.lockutils [None req-5e205b40-cfcf-4563-9c86-1720cbb819cc tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "4a46d003-f57e-4089-aa60-757a4246f071-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 882.951163] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5e205b40-cfcf-4563-9c86-1720cbb819cc tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "4a46d003-f57e-4089-aa60-757a4246f071-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 882.954415] env[69367]: INFO nova.compute.manager [None req-5e205b40-cfcf-4563-9c86-1720cbb819cc tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Terminating instance [ 883.010537] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "46b6bc45-57f0-4850-9249-6bbb22b162c6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.010834] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "46b6bc45-57f0-4850-9249-6bbb22b162c6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.011069] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "46b6bc45-57f0-4850-9249-6bbb22b162c6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.011265] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "46b6bc45-57f0-4850-9249-6bbb22b162c6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.011527] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "46b6bc45-57f0-4850-9249-6bbb22b162c6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.013689] env[69367]: INFO nova.compute.manager [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Terminating instance [ 883.074611] env[69367]: DEBUG oslo_concurrency.lockutils [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "54a1f586-481d-427e-ba0b-be90e5573bd3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.074862] env[69367]: DEBUG oslo_concurrency.lockutils [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "54a1f586-481d-427e-ba0b-be90e5573bd3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.075110] env[69367]: DEBUG oslo_concurrency.lockutils [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "54a1f586-481d-427e-ba0b-be90e5573bd3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.075315] env[69367]: DEBUG oslo_concurrency.lockutils [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "54a1f586-481d-427e-ba0b-be90e5573bd3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.075493] env[69367]: DEBUG oslo_concurrency.lockutils [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "54a1f586-481d-427e-ba0b-be90e5573bd3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.077582] env[69367]: INFO nova.compute.manager [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Terminating instance [ 883.346056] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.370294] env[69367]: DEBUG oslo_concurrency.lockutils [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 
tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.402732] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Lock "c272b0ae-6313-46ab-977c-6de255e77675" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.005s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.456363] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9a266199-78e6-4e87-9d42-759928f6556d tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Lock "2086bd49-6926-4466-9ad0-74f9dbc8b31a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.846s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.458127] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5e205b40-cfcf-4563-9c86-1720cbb819cc tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "refresh_cache-4a46d003-f57e-4089-aa60-757a4246f071" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.458266] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5e205b40-cfcf-4563-9c86-1720cbb819cc tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquired lock "refresh_cache-4a46d003-f57e-4089-aa60-757a4246f071" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.458452] env[69367]: DEBUG nova.network.neutron [None req-5e205b40-cfcf-4563-9c86-1720cbb819cc tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 883.517538] env[69367]: DEBUG nova.compute.manager [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Start destroying the instance on the hypervisor. 
{{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 883.517811] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 883.518720] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98bb46cd-6791-4bc1-8ca5-99c4af93207c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.526883] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 883.528048] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-61811109-e392-4013-815f-b5ac389bb765 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.533900] env[69367]: DEBUG oslo_vmware.api [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 883.533900] env[69367]: value = "task-4234099" [ 883.533900] env[69367]: _type = "Task" [ 883.533900] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.543647] env[69367]: DEBUG oslo_vmware.api [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234099, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.581331] env[69367]: DEBUG nova.compute.manager [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Start destroying the instance on the hypervisor. 
{{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 883.581744] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 883.582784] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfda7163-f3ba-42e0-8cd2-fa5454292aa2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.591025] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 883.591330] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3dd0f46d-6f28-44e8-ac24-2d2e0c09bdbb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.598944] env[69367]: DEBUG oslo_vmware.api [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 883.598944] env[69367]: value = "task-4234100" [ 883.598944] env[69367]: _type = "Task" [ 883.598944] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.608293] env[69367]: DEBUG oslo_vmware.api [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234100, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.929875] env[69367]: DEBUG nova.scheduler.client.report [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 883.948446] env[69367]: DEBUG nova.scheduler.client.report [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 883.948627] env[69367]: DEBUG nova.compute.provider_tree [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 883.960457] env[69367]: DEBUG nova.scheduler.client.report [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 883.987738] env[69367]: DEBUG nova.scheduler.client.report [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 883.995320] env[69367]: DEBUG nova.network.neutron [None req-5e205b40-cfcf-4563-9c86-1720cbb819cc tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 884.048688] env[69367]: DEBUG oslo_vmware.api [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234099, 'name': PowerOffVM_Task, 'duration_secs': 0.268122} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.049009] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 884.049191] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 884.049462] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0314ce17-ed93-465f-be8e-ed0ca4fe60c3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.112493] env[69367]: DEBUG oslo_vmware.api [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234100, 'name': PowerOffVM_Task, 'duration_secs': 0.20846} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.113731] env[69367]: DEBUG nova.network.neutron [None req-5e205b40-cfcf-4563-9c86-1720cbb819cc tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.117950] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 884.118075] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 884.119901] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51e5166e-127c-4998-b68f-475d28e9d76a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.121683] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 884.121922] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Deleting contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 884.122176] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Deleting the datastore file [datastore1] 46b6bc45-57f0-4850-9249-6bbb22b162c6 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 884.122440] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7634a9b5-8c48-4b14-bfcd-5e6b241a24eb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.133209] env[69367]: DEBUG oslo_vmware.api [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 884.133209] env[69367]: value = "task-4234103" [ 884.133209] env[69367]: _type = "Task" [ 884.133209] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.143259] env[69367]: DEBUG oslo_vmware.api [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234103, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.192370] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Acquiring lock "d900df05-b65c-4a45-94d1-563afbf9c022" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.192651] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Lock "d900df05-b65c-4a45-94d1-563afbf9c022" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.192864] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Acquiring lock "d900df05-b65c-4a45-94d1-563afbf9c022-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.193063] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Lock "d900df05-b65c-4a45-94d1-563afbf9c022-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.193253] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Lock "d900df05-b65c-4a45-94d1-563afbf9c022-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 884.194992] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 884.195211] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Deleting contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 884.195397] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Deleting the datastore file [datastore1] 54a1f586-481d-427e-ba0b-be90e5573bd3 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 884.195894] env[69367]: INFO nova.compute.manager [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Terminating instance [ 884.198358] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cd9ad2c2-fea1-4fa2-895c-e17d46b88565 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.209295] env[69367]: DEBUG oslo_vmware.api [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for the task: (returnval){ [ 884.209295] env[69367]: value = "task-4234104" [ 884.209295] env[69367]: _type = "Task" [ 884.209295] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.218048] env[69367]: DEBUG oslo_vmware.api [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234104, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.430792] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-301644e4-917b-4f2d-9d31-b1c803726453 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.443754] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f581f10d-343d-47ce-9762-9ac48f12d0cf {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.482026] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12043733-c5b5-4b21-9114-4d0cdac9937c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.490737] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b6e2ff8-a711-44c9-84ef-62e807008dde {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.507192] env[69367]: DEBUG nova.compute.provider_tree [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 
1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 884.619861] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5e205b40-cfcf-4563-9c86-1720cbb819cc tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Releasing lock "refresh_cache-4a46d003-f57e-4089-aa60-757a4246f071" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 884.620537] env[69367]: DEBUG nova.compute.manager [None req-5e205b40-cfcf-4563-9c86-1720cbb819cc tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Start destroying the instance on the hypervisor. {{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 884.620776] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-5e205b40-cfcf-4563-9c86-1720cbb819cc tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 884.621168] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e2179a4a-c6e1-4f60-b120-d7f721c7f2b3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.631781] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1966f3b-8f9a-46c5-a7b1-d7edb345109e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.652710] env[69367]: DEBUG oslo_vmware.api [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234103, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159593} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.652710] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 884.652710] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Deleted contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 884.652984] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 884.652984] env[69367]: INFO nova.compute.manager [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Took 1.14 seconds to destroy the instance on the hypervisor. [ 884.653183] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 884.653389] env[69367]: DEBUG nova.compute.manager [-] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 884.653483] env[69367]: DEBUG nova.network.neutron [-] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 884.661676] env[69367]: WARNING nova.virt.vmwareapi.vmops [None req-5e205b40-cfcf-4563-9c86-1720cbb819cc tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4a46d003-f57e-4089-aa60-757a4246f071 could not be found. [ 884.661847] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-5e205b40-cfcf-4563-9c86-1720cbb819cc tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 884.662042] env[69367]: INFO nova.compute.manager [None req-5e205b40-cfcf-4563-9c86-1720cbb819cc tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Took 0.04 seconds to destroy the instance on the hypervisor. 
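
The power-off, unregister and datastore-file deletions above all follow the same asynchronous pattern: the vmwareapi driver submits a vCenter task (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) and oslo.vmware then polls that task until it finishes, which is where the repeated "Waiting for the task", "progress is 0%" and "completed successfully" DEBUG lines come from. The following is a minimal Python sketch of that polling loop; fetch_task_state and the poll interval are illustrative assumptions, not the actual oslo.vmware implementation.

import time

# Illustrative task states; the real values are read from the task's
# "info" property via the vSphere PropertyCollector.
RUNNING, SUCCESS, ERROR = "running", "success", "error"

def wait_for_task(fetch_task_state, poll_interval=0.5):
    # fetch_task_state is a hypothetical callable returning
    # (state, progress, result) for the submitted vCenter task.
    while True:
        state, progress, result = fetch_task_state()
        if state == SUCCESS:
            return result                      # task completed successfully
        if state == ERROR:
            raise RuntimeError("task failed: %s" % result)
        print("progress is %s%%" % progress)   # cf. the "progress is 0%" lines
        time.sleep(poll_interval)

# Example with a stub task that finishes on the second poll:
states = iter([(RUNNING, 0, None), (SUCCESS, 100, "task-4234099")])
print(wait_for_task(lambda: next(states), poll_interval=0))
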
[ 884.662329] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5e205b40-cfcf-4563-9c86-1720cbb819cc tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 884.662945] env[69367]: DEBUG nova.compute.manager [-] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 884.663066] env[69367]: DEBUG nova.network.neutron [-] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 884.678966] env[69367]: DEBUG nova.network.neutron [-] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 884.706393] env[69367]: DEBUG nova.compute.manager [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Start destroying the instance on the hypervisor. {{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 884.706623] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 884.707517] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc52992-2d3d-4c99-8113-81545d997776 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.718990] env[69367]: DEBUG oslo_vmware.api [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Task: {'id': task-4234104, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.240297} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.721165] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 884.721368] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Deleted contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 884.721553] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 884.721732] env[69367]: INFO nova.compute.manager [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Took 1.14 seconds to destroy the instance on the hypervisor. [ 884.722041] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 884.722564] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 884.723037] env[69367]: DEBUG nova.compute.manager [-] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 884.723177] env[69367]: DEBUG nova.network.neutron [-] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 884.724797] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f7494127-d8a0-43d0-93a9-1ed4be98e70a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.733716] env[69367]: DEBUG oslo_vmware.api [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Waiting for the task: (returnval){ [ 884.733716] env[69367]: value = "task-4234105" [ 884.733716] env[69367]: _type = "Task" [ 884.733716] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.742167] env[69367]: DEBUG oslo_vmware.api [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Task: {'id': task-4234105, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.918931] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.998211] env[69367]: DEBUG nova.compute.manager [req-99f8aebc-465f-4792-b83e-626aa7c1f6c6 req-bc3888e1-65f3-4b9c-b1b8-8c64b1f10abf service nova] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Received event network-vif-deleted-2217ec6c-a6da-4c26-b9de-53239e598080 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 884.998211] env[69367]: INFO nova.compute.manager [req-99f8aebc-465f-4792-b83e-626aa7c1f6c6 req-bc3888e1-65f3-4b9c-b1b8-8c64b1f10abf service nova] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Neutron deleted interface 2217ec6c-a6da-4c26-b9de-53239e598080; detaching it from the instance and deleting it from the info cache [ 884.999118] env[69367]: DEBUG nova.network.neutron [req-99f8aebc-465f-4792-b83e-626aa7c1f6c6 req-bc3888e1-65f3-4b9c-b1b8-8c64b1f10abf service nova] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.031452] env[69367]: ERROR nova.scheduler.client.report [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [req-fa5b5c6d-bef5-4de9-a71c-cca60c1e7050] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-fa5b5c6d-bef5-4de9-a71c-cca60c1e7050"}]} [ 885.031452] env[69367]: DEBUG oslo_concurrency.lockutils [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.134s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 885.031452] env[69367]: ERROR nova.compute.manager [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: f311f965-e846-4519-8375-ffd831e6afc7] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 885.031452] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] Traceback (most recent call last): [ 885.031452] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 885.031452] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] yield [ 885.031452] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 885.031452] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] self.set_inventory_for_provider( [ 885.031452] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 885.031452] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 885.031452] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-fa5b5c6d-bef5-4de9-a71c-cca60c1e7050"}]} [ 885.031452] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] [ 885.031452] env[69367]: ERROR nova.compute.manager [instance: 
f311f965-e846-4519-8375-ffd831e6afc7] During handling of the above exception, another exception occurred: [ 885.031452] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] [ 885.031452] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] Traceback (most recent call last): [ 885.031452] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 885.031452] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] with self.rt.instance_claim(context, instance, node, allocs, [ 885.031452] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 885.031452] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] return f(*args, **kwargs) [ 885.031452] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 885.031452] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] self._update(elevated, cn) [ 885.031452] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 885.031452] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] self._update_to_placement(context, compute_node, startup) [ 885.031452] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 885.031452] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 885.032818] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 885.032818] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] return attempt.get(self._wrap_exception) [ 885.032818] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 885.032818] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] six.reraise(self.value[0], self.value[1], self.value[2]) [ 885.032818] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 885.032818] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] raise value [ 885.032818] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 885.032818] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 885.032818] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in 
_update_to_placement [ 885.032818] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] self.reportclient.update_from_provider_tree( [ 885.032818] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 885.032818] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] with catch_all(pd.uuid): [ 885.032818] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 885.032818] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] self.gen.throw(typ, value, traceback) [ 885.032818] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 885.032818] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] raise exception.ResourceProviderSyncFailed() [ 885.032818] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 885.032818] env[69367]: ERROR nova.compute.manager [instance: f311f965-e846-4519-8375-ffd831e6afc7] [ 885.032818] env[69367]: DEBUG nova.compute.utils [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: f311f965-e846-4519-8375-ffd831e6afc7] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 885.032818] env[69367]: DEBUG oslo_concurrency.lockutils [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.671s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 885.034747] env[69367]: INFO nova.compute.claims [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 885.038223] env[69367]: DEBUG nova.compute.manager [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: f311f965-e846-4519-8375-ffd831e6afc7] Build of instance f311f965-e846-4519-8375-ffd831e6afc7 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 885.038755] env[69367]: DEBUG nova.compute.manager [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: f311f965-e846-4519-8375-ffd831e6afc7] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 885.038987] env[69367]: DEBUG oslo_concurrency.lockutils [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Acquiring lock "refresh_cache-f311f965-e846-4519-8375-ffd831e6afc7" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.039155] env[69367]: DEBUG oslo_concurrency.lockutils [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Acquired lock "refresh_cache-f311f965-e846-4519-8375-ffd831e6afc7" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 885.039322] env[69367]: DEBUG nova.network.neutron [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: f311f965-e846-4519-8375-ffd831e6afc7] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 885.042459] env[69367]: DEBUG nova.compute.manager [req-18368ae4-e9b1-408a-8c63-7c43c1527fd5 req-498d1fdf-0b29-4447-9eee-08c67f1086e4 service nova] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Received event network-vif-deleted-4fc784f5-80ec-41ce-bb71-af0e71d38e84 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 885.043036] env[69367]: INFO nova.compute.manager [req-18368ae4-e9b1-408a-8c63-7c43c1527fd5 req-498d1fdf-0b29-4447-9eee-08c67f1086e4 service nova] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Neutron deleted interface 4fc784f5-80ec-41ce-bb71-af0e71d38e84; detaching it from the instance and deleting it from the info cache [ 885.043131] env[69367]: DEBUG nova.network.neutron [req-18368ae4-e9b1-408a-8c63-7c43c1527fd5 req-498d1fdf-0b29-4447-9eee-08c67f1086e4 service nova] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.181330] env[69367]: DEBUG nova.network.neutron [-] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.244346] env[69367]: DEBUG oslo_vmware.api [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Task: {'id': task-4234105, 'name': PowerOffVM_Task, 'duration_secs': 0.212525} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.244586] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 885.244764] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 885.245040] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cd2f3176-1474-4b40-81d7-3c1aa316bc50 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.331698] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 885.332014] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Deleting contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 885.332266] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Deleting the datastore file [datastore1] d900df05-b65c-4a45-94d1-563afbf9c022 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 885.332603] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f061a59-71db-4515-a997-a6e034612fb1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.340374] env[69367]: DEBUG oslo_vmware.api [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Waiting for the task: (returnval){ [ 885.340374] env[69367]: value = "task-4234107" [ 885.340374] env[69367]: _type = "Task" [ 885.340374] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.348286] env[69367]: DEBUG oslo_vmware.api [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Task: {'id': task-4234107, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.440989] env[69367]: DEBUG nova.network.neutron [-] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.458172] env[69367]: DEBUG nova.network.neutron [-] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.502068] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9f79d6b0-ff9b-44fa-b73a-580fbf5cd4d3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.512494] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3af1adbe-cab2-4e09-a720-e41a270e85e5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.537470] env[69367]: DEBUG nova.compute.manager [req-99f8aebc-465f-4792-b83e-626aa7c1f6c6 req-bc3888e1-65f3-4b9c-b1b8-8c64b1f10abf service nova] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Detach interface failed, port_id=2217ec6c-a6da-4c26-b9de-53239e598080, reason: Instance 54a1f586-481d-427e-ba0b-be90e5573bd3 could not be found. {{(pid=69367) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11601}} [ 885.544820] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0dbc1604-060f-468d-be65-0c192afa675a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.554512] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa41e92a-d358-4b3d-9bef-1713bcc5efea {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.566229] env[69367]: DEBUG nova.network.neutron [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: f311f965-e846-4519-8375-ffd831e6afc7] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 885.583975] env[69367]: DEBUG nova.compute.manager [req-18368ae4-e9b1-408a-8c63-7c43c1527fd5 req-498d1fdf-0b29-4447-9eee-08c67f1086e4 service nova] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Detach interface failed, port_id=4fc784f5-80ec-41ce-bb71-af0e71d38e84, reason: Instance 46b6bc45-57f0-4850-9249-6bbb22b162c6 could not be found. {{(pid=69367) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11601}} [ 885.641258] env[69367]: DEBUG nova.network.neutron [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: f311f965-e846-4519-8375-ffd831e6afc7] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.684049] env[69367]: INFO nova.compute.manager [-] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Took 1.02 seconds to deallocate network for instance. 
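
The placement failure reported earlier in this section (req-fa5b5c6d) is a plain JSON-Schema rejection: the resource tracker momentarily pushed DISK_GB inventory with 'max_unit': 0, while placement's inventory schema requires max_unit to be an integer of at least 1, exactly as the schema fragment quoted in the 400 response says; the subsequent inventory refreshes show max_unit back at 1. The snippet below reproduces that validation error with the jsonschema package, using a schema reduced to just the constraint quoted in the 400 response rather than placement's full inventory schema.

import jsonschema

# Reduced schema: only the max_unit constraint that the 400 response quotes.
INVENTORY_SCHEMA = {
    "type": "object",
    "properties": {
        "inventories": {
            "type": "object",
            "patternProperties": {
                "^[A-Z0-9_]+$": {
                    "type": "object",
                    "properties": {
                        "max_unit": {"type": "integer",
                                     "maximum": 2147483647,
                                     "minimum": 1},
                    },
                },
            },
        },
    },
}

# The DISK_GB inventory the compute host tried to push (max_unit == 0).
payload = {"inventories": {"DISK_GB": {"total": 400, "reserved": 0,
                                       "min_unit": 1, "max_unit": 0,
                                       "step_size": 1,
                                       "allocation_ratio": 1.0}}}

try:
    jsonschema.validate(payload, INVENTORY_SCHEMA)
except jsonschema.ValidationError as exc:
    print(exc.message)   # "0 is less than the minimum of 1", as in the log
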
[ 885.854209] env[69367]: DEBUG oslo_vmware.api [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Task: {'id': task-4234107, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.944316] env[69367]: INFO nova.compute.manager [-] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Took 1.22 seconds to deallocate network for instance. [ 885.961351] env[69367]: INFO nova.compute.manager [-] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Took 1.31 seconds to deallocate network for instance. [ 886.064704] env[69367]: DEBUG nova.scheduler.client.report [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 886.081197] env[69367]: DEBUG nova.scheduler.client.report [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 886.081449] env[69367]: DEBUG nova.compute.provider_tree [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 886.097203] env[69367]: DEBUG nova.scheduler.client.report [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 886.117766] env[69367]: DEBUG nova.scheduler.client.report [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 886.147730] env[69367]: DEBUG oslo_concurrency.lockutils [None req-34a62984-6384-406f-821f-265cad7ab0f7 
tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Releasing lock "refresh_cache-f311f965-e846-4519-8375-ffd831e6afc7" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 886.148068] env[69367]: DEBUG nova.compute.manager [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 886.148322] env[69367]: DEBUG nova.compute.manager [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: f311f965-e846-4519-8375-ffd831e6afc7] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 886.148521] env[69367]: DEBUG nova.network.neutron [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: f311f965-e846-4519-8375-ffd831e6afc7] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 886.172536] env[69367]: DEBUG nova.network.neutron [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: f311f965-e846-4519-8375-ffd831e6afc7] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 886.193362] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5e205b40-cfcf-4563-9c86-1720cbb819cc tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.352817] env[69367]: DEBUG oslo_vmware.api [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Task: {'id': task-4234107, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.596564} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.353081] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 886.353268] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Deleted contents of the VM from datastore datastore1 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 886.353444] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 886.353618] env[69367]: INFO nova.compute.manager [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Took 1.65 seconds to destroy the instance on the hypervisor. [ 886.353851] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 886.354052] env[69367]: DEBUG nova.compute.manager [-] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 886.354149] env[69367]: DEBUG nova.network.neutron [-] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 886.453143] env[69367]: DEBUG oslo_concurrency.lockutils [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.467456] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.482945] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f152defe-bb34-4b65-9032-64db5311db10 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.490451] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c757af-8545-4ab7-952e-91eef8484cf4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.524615] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85fb0218-eae6-456c-a009-649c3344e2f1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.532951] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6f6f7a-bc42-4381-bcb8-205e352643c5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.546790] env[69367]: DEBUG nova.compute.provider_tree [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 886.679546] env[69367]: DEBUG nova.network.neutron [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: f311f965-e846-4519-8375-ffd831e6afc7] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 887.069398] env[69367]: DEBUG nova.compute.manager [req-7dc0d969-bbbb-423c-bfb0-5e857f40e87e req-56202d64-fb0b-4e4a-be5c-19eb270ca764 service nova] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Received event network-vif-deleted-8ae37b9e-8ee4-4b68-b70f-46238b3bc14e {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 887.069642] env[69367]: INFO nova.compute.manager [req-7dc0d969-bbbb-423c-bfb0-5e857f40e87e req-56202d64-fb0b-4e4a-be5c-19eb270ca764 service nova] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Neutron deleted interface 8ae37b9e-8ee4-4b68-b70f-46238b3bc14e; detaching it from the instance and deleting it from the info cache [ 887.069808] env[69367]: DEBUG nova.network.neutron [req-7dc0d969-bbbb-423c-bfb0-5e857f40e87e req-56202d64-fb0b-4e4a-be5c-19eb270ca764 service nova] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.071677] env[69367]: ERROR nova.scheduler.client.report [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [req-a403a35f-574d-4c49-9f79-308dab8b0ecd] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-a403a35f-574d-4c49-9f79-308dab8b0ecd"}]} [ 887.072037] env[69367]: DEBUG oslo_concurrency.lockutils [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.040s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 887.072652] env[69367]: ERROR nova.compute.manager [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
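The 400 body quoted above contains the Placement inventory schema fragment that rejects the update: max_unit must be an integer of at least 1, but the compute node is reporting DISK_GB with max_unit 0. The snippet below reproduces just that validation step with the jsonschema library, using the schema fragment and payload as they appear in the error detail; it is an illustrative reconstruction, not Placement's server-side code. The traceback in the records that follow shows how this rejection surfaces as ResourceProviderSyncFailed.

```python
# Reproduce the validation failure quoted in the 400 body:
# "JSON does not validate: 0 is less than the minimum of 1" for
# instance['inventories']['DISK_GB']['max_unit'].
import jsonschema

# Schema fragment as quoted in the error detail above, reduced to the
# fields relevant here; the real Placement schema has more properties.
INVENTORY_SCHEMA = {
    "type": "object",
    "properties": {
        "inventories": {
            "type": "object",
            "patternProperties": {
                "^[A-Z0-9_]+$": {
                    "type": "object",
                    "properties": {
                        "max_unit": {
                            "type": "integer",
                            "maximum": 2147483647,
                            "minimum": 1,
                        },
                    },
                },
            },
        },
    },
}

# The payload the resource tracker tried to PUT, per the log lines above.
payload = {
    "inventories": {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                      "max_unit": 65530, "step_size": 1,
                      "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1,
                    "max_unit": 0, "step_size": 1, "allocation_ratio": 1.0},
    }
}

try:
    jsonschema.validate(payload, INVENTORY_SCHEMA)
except jsonschema.ValidationError as exc:
    # Prints: "0 is less than the minimum of 1 at ['inventories', 'DISK_GB', 'max_unit']"
    print(exc.message, "at", list(exc.absolute_path))
```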
[ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] Traceback (most recent call last): [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] yield [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] self.set_inventory_for_provider( [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-a403a35f-574d-4c49-9f79-308dab8b0ecd"}]} [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] During handling of the above exception, another exception occurred: [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] Traceback (most recent call last): [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] with self.rt.instance_claim(context, instance, node, allocs, [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] return f(*args, **kwargs) [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] self._update(elevated, cn) [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 
4f53c9fd-4c1a-4ac0-8116-41e54be9de18] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] self._update_to_placement(context, compute_node, startup) [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] return attempt.get(self._wrap_exception) [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] six.reraise(self.value[0], self.value[1], self.value[2]) [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] raise value [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] self.reportclient.update_from_provider_tree( [ 887.072652] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 887.073744] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] with catch_all(pd.uuid): [ 887.073744] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 887.073744] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] self.gen.throw(typ, value, traceback) [ 887.073744] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 887.073744] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] raise exception.ResourceProviderSyncFailed() [ 887.073744] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 887.073744] env[69367]: ERROR nova.compute.manager [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] [ 887.073744] env[69367]: DEBUG nova.compute.utils [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 887.074998] env[69367]: DEBUG oslo_concurrency.lockutils [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.660s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 887.076330] env[69367]: INFO nova.compute.claims [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 887.079369] env[69367]: DEBUG nova.compute.manager [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] Build of instance 4f53c9fd-4c1a-4ac0-8116-41e54be9de18 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 887.079797] env[69367]: DEBUG nova.compute.manager [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 887.080077] env[69367]: DEBUG oslo_concurrency.lockutils [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquiring lock "refresh_cache-4f53c9fd-4c1a-4ac0-8116-41e54be9de18" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.080230] env[69367]: DEBUG oslo_concurrency.lockutils [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquired lock "refresh_cache-4f53c9fd-4c1a-4ac0-8116-41e54be9de18" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 887.080420] env[69367]: DEBUG nova.network.neutron [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 887.081667] env[69367]: DEBUG nova.network.neutron [-] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.181944] env[69367]: INFO nova.compute.manager [None 
req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: f311f965-e846-4519-8375-ffd831e6afc7] Took 1.03 seconds to deallocate network for instance. [ 887.580131] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-78a893c8-7a3c-4135-a5b9-a7658dd28e60 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.587814] env[69367]: INFO nova.compute.manager [-] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Took 1.23 seconds to deallocate network for instance. [ 887.599029] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00411cc4-d3d1-4971-8244-b43216ee6509 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.610319] env[69367]: DEBUG nova.network.neutron [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 887.627521] env[69367]: DEBUG nova.compute.manager [req-7dc0d969-bbbb-423c-bfb0-5e857f40e87e req-56202d64-fb0b-4e4a-be5c-19eb270ca764 service nova] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Detach interface failed, port_id=8ae37b9e-8ee4-4b68-b70f-46238b3bc14e, reason: Instance d900df05-b65c-4a45-94d1-563afbf9c022 could not be found. {{(pid=69367) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11601}} [ 887.707619] env[69367]: DEBUG nova.network.neutron [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.098015] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.109515] env[69367]: DEBUG nova.scheduler.client.report [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 888.128736] env[69367]: DEBUG nova.scheduler.client.report [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 
1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 888.128950] env[69367]: DEBUG nova.compute.provider_tree [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 888.143915] env[69367]: DEBUG nova.scheduler.client.report [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 888.164186] env[69367]: DEBUG nova.scheduler.client.report [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 888.213428] env[69367]: DEBUG oslo_concurrency.lockutils [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Releasing lock "refresh_cache-4f53c9fd-4c1a-4ac0-8116-41e54be9de18" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 888.213727] env[69367]: DEBUG nova.compute.manager [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 888.213919] env[69367]: DEBUG nova.compute.manager [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 888.214104] env[69367]: DEBUG nova.network.neutron [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 888.217046] env[69367]: INFO nova.scheduler.client.report [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Deleted allocations for instance f311f965-e846-4519-8375-ffd831e6afc7 [ 888.249228] env[69367]: DEBUG nova.network.neutron [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 888.414507] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ffc35d42-3ee2-4ca8-9c3f-29373ee60f12 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "557dc011-44a1-4240-9596-d055d57e176f" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.414807] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ffc35d42-3ee2-4ca8-9c3f-29373ee60f12 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "557dc011-44a1-4240-9596-d055d57e176f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 888.513786] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3298c4d8-d4bb-44cd-ad09-fbf5ad71a0c1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.523273] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fb6476b-a8d1-49b6-b454-b8ab854473b4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.554133] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d980fe-abfc-49a3-8667-74d6d8f6eeef {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.562584] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0696aa45-5506-4da2-b176-3162c747f899 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.576127] env[69367]: DEBUG nova.compute.provider_tree [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 
tempest-AttachVolumeNegativeTest-2041104029-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 888.725976] env[69367]: DEBUG oslo_concurrency.lockutils [None req-34a62984-6384-406f-821f-265cad7ab0f7 tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Lock "f311f965-e846-4519-8375-ffd831e6afc7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.622s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 888.750749] env[69367]: DEBUG nova.network.neutron [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.920165] env[69367]: DEBUG nova.compute.utils [None req-ffc35d42-3ee2-4ca8-9c3f-29373ee60f12 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 889.100412] env[69367]: ERROR nova.scheduler.client.report [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [req-74685234-2df0-46c1-aada-c4b91eecf302] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-74685234-2df0-46c1-aada-c4b91eecf302"}]} [ 889.100731] env[69367]: DEBUG oslo_concurrency.lockutils [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.026s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.101351] env[69367]: ERROR nova.compute.manager [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] Traceback (most recent call last): [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] yield [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] self.set_inventory_for_provider( [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-74685234-2df0-46c1-aada-c4b91eecf302"}]} [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] [ 889.101351] env[69367]: ERROR nova.compute.manager 
[instance: 9aec881e-8381-4626-b527-3df7e0671d8f] During handling of the above exception, another exception occurred: [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] Traceback (most recent call last): [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] with self.rt.instance_claim(context, instance, node, allocs, [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] return f(*args, **kwargs) [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] self._update(elevated, cn) [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] self._update_to_placement(context, compute_node, startup) [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] return attempt.get(self._wrap_exception) [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] six.reraise(self.value[0], self.value[1], self.value[2]) [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] raise value [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 
1390, in _update_to_placement [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] self.reportclient.update_from_provider_tree( [ 889.101351] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 889.102318] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] with catch_all(pd.uuid): [ 889.102318] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 889.102318] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] self.gen.throw(typ, value, traceback) [ 889.102318] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 889.102318] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] raise exception.ResourceProviderSyncFailed() [ 889.102318] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 889.102318] env[69367]: ERROR nova.compute.manager [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] [ 889.102318] env[69367]: DEBUG nova.compute.utils [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 889.104469] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.982s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 889.104553] env[69367]: DEBUG nova.objects.instance [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Lazy-loading 'resources' on Instance uuid 05aae150-5d86-4210-ae7e-8c63e83cb907 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 889.106024] env[69367]: DEBUG nova.compute.manager [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] Build of instance 9aec881e-8381-4626-b527-3df7e0671d8f was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 889.106463] env[69367]: DEBUG nova.compute.manager [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 889.106691] env[69367]: DEBUG oslo_concurrency.lockutils [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquiring lock "refresh_cache-9aec881e-8381-4626-b527-3df7e0671d8f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.106904] env[69367]: DEBUG oslo_concurrency.lockutils [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquired lock "refresh_cache-9aec881e-8381-4626-b527-3df7e0671d8f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 889.107481] env[69367]: DEBUG nova.network.neutron [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 889.254279] env[69367]: INFO nova.compute.manager [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 4f53c9fd-4c1a-4ac0-8116-41e54be9de18] Took 1.04 seconds to deallocate network for instance. [ 889.423656] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ffc35d42-3ee2-4ca8-9c3f-29373ee60f12 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "557dc011-44a1-4240-9596-d055d57e176f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.629662] env[69367]: DEBUG nova.scheduler.client.report [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 889.633915] env[69367]: DEBUG nova.network.neutron [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 889.644401] env[69367]: DEBUG nova.scheduler.client.report [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 889.644775] env[69367]: DEBUG nova.compute.provider_tree [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 889.657245] env[69367]: DEBUG nova.scheduler.client.report [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 889.674471] env[69367]: DEBUG nova.scheduler.client.report [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 889.719244] env[69367]: DEBUG nova.network.neutron [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.964455] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Acquiring lock "b5786197-8ba8-44e1-ac01-2c9837ca5ec6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 889.964712] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa 
tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Lock "b5786197-8ba8-44e1-ac01-2c9837ca5ec6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.057609] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7aa7dad-681b-4004-9759-57bdee7aab89 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.066285] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a269e474-bd5e-4e66-b4ce-ccb887b2031d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.098342] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9525082-7670-404b-b718-c994b10b80fe {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.106745] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85642f38-af87-4ab8-8331-7e6f816e6647 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.121017] env[69367]: DEBUG nova.compute.provider_tree [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 890.221899] env[69367]: DEBUG oslo_concurrency.lockutils [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Releasing lock "refresh_cache-9aec881e-8381-4626-b527-3df7e0671d8f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 890.222120] env[69367]: DEBUG nova.compute.manager [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 890.222398] env[69367]: DEBUG nova.compute.manager [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 890.222579] env[69367]: DEBUG nova.network.neutron [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 890.240623] env[69367]: DEBUG nova.network.neutron [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 890.282247] env[69367]: INFO nova.scheduler.client.report [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Deleted allocations for instance 4f53c9fd-4c1a-4ac0-8116-41e54be9de18 [ 890.468525] env[69367]: DEBUG nova.compute.manager [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 890.508305] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ffc35d42-3ee2-4ca8-9c3f-29373ee60f12 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "557dc011-44a1-4240-9596-d055d57e176f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.508571] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ffc35d42-3ee2-4ca8-9c3f-29373ee60f12 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "557dc011-44a1-4240-9596-d055d57e176f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.508811] env[69367]: INFO nova.compute.manager [None req-ffc35d42-3ee2-4ca8-9c3f-29373ee60f12 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Attaching volume 1f3c6529-40e9-4e54-90e5-8dea525edf25 to /dev/sdb [ 890.546911] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c9ed86-eb21-4a85-b812-57846a5d6723 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.555571] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7fcf911-cead-48eb-8028-e3a0287e7a54 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
890.570583] env[69367]: DEBUG nova.virt.block_device [None req-ffc35d42-3ee2-4ca8-9c3f-29373ee60f12 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Updating existing volume attachment record: ef2805a9-5fd9-4c89-805a-ba5c1eb30c39 {{(pid=69367) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 890.641759] env[69367]: ERROR nova.scheduler.client.report [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [req-4e7bf7bc-e2f4-4391-9fd0-d971e94f8f3f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-4e7bf7bc-e2f4-4391-9fd0-d971e94f8f3f"}]} [ 890.642212] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.538s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.642848] env[69367]: ERROR nova.compute.manager [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
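Note how the inventory the resource tracker keeps pushing reports DISK_GB total 400 throughout while max_unit flips between 1 and 0, and every sync that carries 0 is rejected. With the vmwareapi driver in use here, the per-allocation disk limit is plausibly derived from datastore free space, so an integer-GiB conversion floors to 0 once the most-free accessible datastore drops below 1 GiB. The sketch below shows that arithmetic under that assumption; the helper and field derivation are illustrative, not a copy of the driver's code.

```python
# Hedged sketch: how a DISK_GB max_unit of 0 can arise when free space is
# converted to whole GiB. Assumes max_unit comes from the largest free
# space among accessible datastores (an approximation, not the literal
# VMware driver implementation).
GiB = 1024 ** 3


def disk_gb_inventory(total_capacity_bytes, free_space_bytes_per_datastore):
    max_free = max(free_space_bytes_per_datastore, default=0)
    return {
        "total": total_capacity_bytes // GiB,
        "reserved": 0,
        "min_unit": 1,
        # Floors to 0 once the best datastore has < 1 GiB free, which
        # Placement then rejects (schema minimum is 1).
        "max_unit": max_free // GiB,
        "step_size": 1,
        "allocation_ratio": 1.0,
    }


# Roughly the situation in this log: ~400 GiB of capacity, but the
# most-free datastore is down to a few hundred MiB.
print(disk_gb_inventory(400 * GiB, [700 * 1024 ** 2, 300 * 1024 ** 2]))
# -> {'total': 400, ..., 'max_unit': 0, ...}
```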
[ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Traceback (most recent call last): [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] yield [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] self.set_inventory_for_provider( [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-4e7bf7bc-e2f4-4391-9fd0-d971e94f8f3f"}]} [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] During handling of the above exception, another exception occurred: [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Traceback (most recent call last): [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] self._delete_instance(context, instance, bdms) [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] self._complete_deletion(context, instance) [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] self._update_resource_tracker(context, instance) [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 
05aae150-5d86-4210-ae7e-8c63e83cb907] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] self.rt.update_usage(context, instance, instance.node) [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] return f(*args, **kwargs) [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] self._update(context.elevated(), self.compute_nodes[nodename]) [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] self._update_to_placement(context, compute_node, startup) [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] return attempt.get(self._wrap_exception) [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] six.reraise(self.value[0], self.value[1], self.value[2]) [ 890.642848] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 890.643932] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] raise value [ 890.643932] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 890.643932] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 890.643932] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 890.643932] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] self.reportclient.update_from_provider_tree( [ 890.643932] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 890.643932] 
env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] with catch_all(pd.uuid): [ 890.643932] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 890.643932] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] self.gen.throw(typ, value, traceback) [ 890.643932] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 890.643932] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] raise exception.ResourceProviderSyncFailed() [ 890.643932] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 890.643932] env[69367]: ERROR nova.compute.manager [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] [ 890.645310] env[69367]: DEBUG oslo_concurrency.lockutils [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.040s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.646727] env[69367]: INFO nova.compute.claims [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 890.743881] env[69367]: DEBUG nova.network.neutron [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.790338] env[69367]: DEBUG oslo_concurrency.lockutils [None req-adc3b60d-ebe8-4239-8397-225765297e6e tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "4f53c9fd-4c1a-4ac0-8116-41e54be9de18" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.589s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.991824] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 891.152038] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Lock "05aae150-5d86-4210-ae7e-8c63e83cb907" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.432s {{(pid=69367) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 891.246297] env[69367]: INFO nova.compute.manager [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: 9aec881e-8381-4626-b527-3df7e0671d8f] Took 1.02 seconds to deallocate network for instance. [ 891.687054] env[69367]: DEBUG nova.scheduler.client.report [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 891.718206] env[69367]: DEBUG nova.scheduler.client.report [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 891.718353] env[69367]: DEBUG nova.compute.provider_tree [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 891.730171] env[69367]: DEBUG nova.scheduler.client.report [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 891.758094] env[69367]: DEBUG nova.scheduler.client.report [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 892.038398] env[69367]: DEBUG oslo_concurrency.lockutils [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquiring lock "95efcff3-a81b-49fb-b85a-dae060c023b2" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.038797] env[69367]: DEBUG oslo_concurrency.lockutils [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "95efcff3-a81b-49fb-b85a-dae060c023b2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.039529] env[69367]: DEBUG oslo_concurrency.lockutils [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquiring lock "95efcff3-a81b-49fb-b85a-dae060c023b2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.039529] env[69367]: DEBUG oslo_concurrency.lockutils [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "95efcff3-a81b-49fb-b85a-dae060c023b2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.039529] env[69367]: DEBUG oslo_concurrency.lockutils [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "95efcff3-a81b-49fb-b85a-dae060c023b2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.043066] env[69367]: INFO nova.compute.manager [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Terminating instance [ 892.165869] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e95b0ca-1db8-447f-b18d-90cc56bd2338 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.174678] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7065d606-d37b-4657-9815-ba503ba5f505 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.206403] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-083f003e-3cfd-4714-ab16-7a8ba3ddfbc3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.214944] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bfa9d09-4a9e-4532-9ad7-a40c79b73aa2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.230221] env[69367]: DEBUG nova.compute.provider_tree [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Updating inventory in 
ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 892.274232] env[69367]: INFO nova.scheduler.client.report [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Deleted allocations for instance 9aec881e-8381-4626-b527-3df7e0671d8f [ 892.549489] env[69367]: DEBUG nova.compute.manager [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Start destroying the instance on the hypervisor. {{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 892.549662] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 892.550650] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a48d4ff-027f-4fd7-aa58-264eea929a1b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.559122] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 892.560068] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0ca429fb-b9c2-4800-b4fd-244212a6c541 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.567027] env[69367]: DEBUG oslo_vmware.api [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Waiting for the task: (returnval){ [ 892.567027] env[69367]: value = "task-4234111" [ 892.567027] env[69367]: _type = "Task" [ 892.567027] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.576159] env[69367]: DEBUG oslo_vmware.api [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': task-4234111, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.670992] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.707678] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquiring lock "1b57dbcb-527e-4142-8dbf-5622978a7c02" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.708432] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "1b57dbcb-527e-4142-8dbf-5622978a7c02" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.757395] env[69367]: ERROR nova.scheduler.client.report [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [req-df673962-e790-431c-90b0-e93cc029725d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-df673962-e790-431c-90b0-e93cc029725d"}]} [ 892.757888] env[69367]: DEBUG oslo_concurrency.lockutils [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.113s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.758555] env[69367]: ERROR nova.compute.manager [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] Traceback (most recent call last): [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] yield [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] self.set_inventory_for_provider( [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-df673962-e790-431c-90b0-e93cc029725d"}]} [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] [ 892.758555] env[69367]: ERROR nova.compute.manager 
[instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] During handling of the above exception, another exception occurred: [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] Traceback (most recent call last): [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] with self.rt.instance_claim(context, instance, node, allocs, [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] return f(*args, **kwargs) [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] self._update(elevated, cn) [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] self._update_to_placement(context, compute_node, startup) [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] return attempt.get(self._wrap_exception) [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] six.reraise(self.value[0], self.value[1], self.value[2]) [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] raise value [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 
1390, in _update_to_placement [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] self.reportclient.update_from_provider_tree( [ 892.758555] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 892.759538] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] with catch_all(pd.uuid): [ 892.759538] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 892.759538] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] self.gen.throw(typ, value, traceback) [ 892.759538] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 892.759538] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] raise exception.ResourceProviderSyncFailed() [ 892.759538] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 892.759538] env[69367]: ERROR nova.compute.manager [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] [ 892.759912] env[69367]: DEBUG nova.compute.utils [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 892.761681] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.547s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.763548] env[69367]: INFO nova.compute.claims [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 892.767547] env[69367]: DEBUG nova.compute.manager [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] Build of instance 67a3aaff-83ce-4c6c-af48-a3d4c52188cf was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
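[editor's note] The two tracebacks above both come from the same root cause that the 400 response spells out verbatim: the compute node reports a DISK_GB inventory with max_unit=0, while placement's inventory schema requires max_unit >= 1, so every PUT to /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories is rejected and ResourceProviderSyncFailed is raised. The snippet below is only an illustrative, standalone reproduction of that validation step using the jsonschema library; the payload is reduced to the DISK_GB entry from the log, the schema is the fragment quoted in the error detail, and the variable names are not from Nova or placement code.

# Hedged sketch: reproduce the schema rejection seen in the 400 response above.
# The schema fragment is copied from the error detail; the reduced payload and
# names here are illustrative assumptions, not placement's actual code.
import jsonschema

payload = {
    "inventories": {
        "DISK_GB": {
            "total": 400,
            "reserved": 0,
            "min_unit": 1,
            "max_unit": 0,          # value the compute host reported; < 1 fails validation
            "step_size": 1,
            "allocation_ratio": 1.0,
        }
    }
}

# Only the per-resource-class max_unit constraint quoted in the log:
# {'type': 'integer', 'maximum': 2147483647, 'minimum': 1}
schema = {
    "type": "object",
    "properties": {
        "inventories": {
            "type": "object",
            "patternProperties": {
                "^[A-Z0-9_]+$": {
                    "type": "object",
                    "properties": {
                        "max_unit": {
                            "type": "integer",
                            "maximum": 2147483647,
                            "minimum": 1,
                        }
                    },
                }
            },
        }
    },
}

try:
    jsonschema.validate(payload, schema)
except jsonschema.exceptions.ValidationError as exc:
    # Prints the same detail placement returns: "0 is less than the minimum of 1"
    print(exc.message)

Note, for context only: the surrounding entries show the refreshed inventory from placement carrying DISK_GB max_unit=1 while the locally computed ProviderTree inventory carries max_unit=0, which is why each resource-tracker update keeps failing with the same 400 until the host-side inventory data is corrected.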
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 892.768058] env[69367]: DEBUG nova.compute.manager [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 892.768338] env[69367]: DEBUG oslo_concurrency.lockutils [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Acquiring lock "refresh_cache-67a3aaff-83ce-4c6c-af48-a3d4c52188cf" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.768547] env[69367]: DEBUG oslo_concurrency.lockutils [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Acquired lock "refresh_cache-67a3aaff-83ce-4c6c-af48-a3d4c52188cf" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 892.768762] env[69367]: DEBUG nova.network.neutron [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 892.782920] env[69367]: DEBUG oslo_concurrency.lockutils [None req-17f0e1de-c958-43f7-a014-b456f23e5dbb tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "9aec881e-8381-4626-b527-3df7e0671d8f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.391s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.077736] env[69367]: DEBUG oslo_vmware.api [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': task-4234111, 'name': PowerOffVM_Task, 'duration_secs': 0.190294} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.078026] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 893.078195] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 893.078451] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9433e5a5-c7d2-45f6-aa92-f2654e4f8a7d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.150035] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 893.151018] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 893.151018] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Deleting the datastore file [datastore2] 95efcff3-a81b-49fb-b85a-dae060c023b2 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 893.151382] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4a93f8ad-1025-4914-af37-2ce41aa706f0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.161298] env[69367]: DEBUG oslo_vmware.api [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Waiting for the task: (returnval){ [ 893.161298] env[69367]: value = "task-4234114" [ 893.161298] env[69367]: _type = "Task" [ 893.161298] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.175841] env[69367]: DEBUG oslo_vmware.api [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': task-4234114, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.212135] env[69367]: DEBUG nova.compute.manager [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 893.507426] env[69367]: DEBUG nova.network.neutron [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 893.667342] env[69367]: DEBUG nova.network.neutron [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.676169] env[69367]: DEBUG oslo_vmware.api [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Task: {'id': task-4234114, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162728} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.676446] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 893.676630] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 893.676806] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 893.676977] env[69367]: INFO nova.compute.manager [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Took 1.13 seconds to destroy the instance on the hypervisor. [ 893.677236] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 893.677423] env[69367]: DEBUG nova.compute.manager [-] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 893.677515] env[69367]: DEBUG nova.network.neutron [-] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 893.741736] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 893.801714] env[69367]: DEBUG nova.scheduler.client.report [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 893.822080] env[69367]: DEBUG nova.scheduler.client.report [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 893.822080] env[69367]: DEBUG nova.compute.provider_tree [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 893.836587] env[69367]: DEBUG nova.scheduler.client.report [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 893.859951] env[69367]: DEBUG nova.scheduler.client.report [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: 
HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 894.017291] env[69367]: DEBUG nova.compute.manager [req-8afd2b97-3913-4d93-aacd-342a75e46071 req-0af13858-6239-427e-a522-33447eb59169 service nova] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Received event network-vif-deleted-78c06e71-f193-4afd-bc2a-6864911de0ff {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 894.019201] env[69367]: INFO nova.compute.manager [req-8afd2b97-3913-4d93-aacd-342a75e46071 req-0af13858-6239-427e-a522-33447eb59169 service nova] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Neutron deleted interface 78c06e71-f193-4afd-bc2a-6864911de0ff; detaching it from the instance and deleting it from the info cache [ 894.019201] env[69367]: DEBUG nova.network.neutron [req-8afd2b97-3913-4d93-aacd-342a75e46071 req-0af13858-6239-427e-a522-33447eb59169 service nova] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.171015] env[69367]: DEBUG oslo_concurrency.lockutils [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Releasing lock "refresh_cache-67a3aaff-83ce-4c6c-af48-a3d4c52188cf" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 894.171225] env[69367]: DEBUG nova.compute.manager [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 894.171409] env[69367]: DEBUG nova.compute.manager [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 894.171580] env[69367]: DEBUG nova.network.neutron [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 894.200709] env[69367]: DEBUG nova.network.neutron [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 894.259156] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0863a377-deb2-4803-accf-c388b69c8528 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.268336] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e9cd1f-3e7f-4b0f-bd27-466d78b599f5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.304456] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d72f19e-7213-4891-a931-0e670cac362d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.312819] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9242ffde-2d54-4981-8e7d-04162c0f2675 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.327911] env[69367]: DEBUG nova.compute.provider_tree [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 894.494336] env[69367]: DEBUG nova.network.neutron [-] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.524211] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-48e6fdfc-bb17-427b-befa-83124f9bdfae {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.533625] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8bde45c-fd43-40f7-aee7-965bda11653f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.562386] env[69367]: DEBUG nova.compute.manager [req-8afd2b97-3913-4d93-aacd-342a75e46071 req-0af13858-6239-427e-a522-33447eb59169 service nova] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Detach interface failed, port_id=78c06e71-f193-4afd-bc2a-6864911de0ff, reason: Instance 95efcff3-a81b-49fb-b85a-dae060c023b2 could not be found. 
{{(pid=69367) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11601}} [ 894.706503] env[69367]: DEBUG nova.network.neutron [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.834495] env[69367]: DEBUG nova.scheduler.client.report [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 894.997108] env[69367]: INFO nova.compute.manager [-] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Took 1.32 seconds to deallocate network for instance. [ 895.131266] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffc35d42-3ee2-4ca8-9c3f-29373ee60f12 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Volume attach. Driver type: vmdk {{(pid=69367) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 895.131266] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffc35d42-3ee2-4ca8-9c3f-29373ee60f12 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837752', 'volume_id': '1f3c6529-40e9-4e54-90e5-8dea525edf25', 'name': 'volume-1f3c6529-40e9-4e54-90e5-8dea525edf25', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '557dc011-44a1-4240-9596-d055d57e176f', 'attached_at': '', 'detached_at': '', 'volume_id': '1f3c6529-40e9-4e54-90e5-8dea525edf25', 'serial': '1f3c6529-40e9-4e54-90e5-8dea525edf25'} {{(pid=69367) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 895.131639] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b5f307d-97ae-4e10-84e6-9ab5d5b1444d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.157772] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25424749-ab14-4e2d-bc1b-a4de225b949d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.188561] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffc35d42-3ee2-4ca8-9c3f-29373ee60f12 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Reconfiguring VM instance instance-00000037 to attach disk [localhost-esx-install-datastore (1)] 
volume-1f3c6529-40e9-4e54-90e5-8dea525edf25/volume-1f3c6529-40e9-4e54-90e5-8dea525edf25.vmdk or device None with type thin {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 895.190414] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2487b7f-0bdb-46b9-a26f-3721c966f6dd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.211173] env[69367]: INFO nova.compute.manager [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 67a3aaff-83ce-4c6c-af48-a3d4c52188cf] Took 1.04 seconds to deallocate network for instance. [ 895.213426] env[69367]: DEBUG oslo_vmware.api [None req-ffc35d42-3ee2-4ca8-9c3f-29373ee60f12 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 895.213426] env[69367]: value = "task-4234115" [ 895.213426] env[69367]: _type = "Task" [ 895.213426] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.226816] env[69367]: DEBUG oslo_vmware.api [None req-ffc35d42-3ee2-4ca8-9c3f-29373ee60f12 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234115, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.341076] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.579s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.341778] env[69367]: DEBUG nova.compute.manager [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Start building networks asynchronously for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 895.344441] env[69367]: DEBUG oslo_concurrency.lockutils [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.292s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 895.346302] env[69367]: INFO nova.compute.claims [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 895.505970] env[69367]: DEBUG oslo_concurrency.lockutils [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 895.727782] env[69367]: DEBUG oslo_vmware.api [None req-ffc35d42-3ee2-4ca8-9c3f-29373ee60f12 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234115, 'name': ReconfigVM_Task, 'duration_secs': 0.221799} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.728028] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffc35d42-3ee2-4ca8-9c3f-29373ee60f12 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Reconfigured VM instance instance-00000037 to attach disk [localhost-esx-install-datastore (1)] volume-1f3c6529-40e9-4e54-90e5-8dea525edf25/volume-1f3c6529-40e9-4e54-90e5-8dea525edf25.vmdk or device None with type thin {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 895.733529] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c06221d3-9f17-4b76-89f2-29c108f32a87 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.749907] env[69367]: DEBUG oslo_vmware.api [None req-ffc35d42-3ee2-4ca8-9c3f-29373ee60f12 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 895.749907] env[69367]: value = "task-4234116" [ 895.749907] env[69367]: _type = "Task" [ 895.749907] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.759676] env[69367]: DEBUG oslo_vmware.api [None req-ffc35d42-3ee2-4ca8-9c3f-29373ee60f12 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234116, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.853281] env[69367]: DEBUG nova.compute.utils [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 895.855906] env[69367]: DEBUG nova.compute.manager [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Allocating IP information in the background. {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 895.856359] env[69367]: DEBUG nova.network.neutron [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 895.877712] env[69367]: DEBUG oslo_concurrency.lockutils [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquiring lock "e2639eea-9e67-45b5-acf0-5b015b4c0a1e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 895.878560] env[69367]: DEBUG oslo_concurrency.lockutils [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "e2639eea-9e67-45b5-acf0-5b015b4c0a1e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 895.905046] env[69367]: DEBUG nova.policy [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0da2fd26b98d4990b19d51991ad26eda', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68ad9e06b1fb4e5bbad98a14e0c55c60', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 896.253374] env[69367]: INFO nova.scheduler.client.report [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Deleted allocations for instance 67a3aaff-83ce-4c6c-af48-a3d4c52188cf [ 896.272956] env[69367]: DEBUG oslo_vmware.api [None req-ffc35d42-3ee2-4ca8-9c3f-29373ee60f12 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234116, 'name': ReconfigVM_Task, 'duration_secs': 0.15319} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.273360] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffc35d42-3ee2-4ca8-9c3f-29373ee60f12 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837752', 'volume_id': '1f3c6529-40e9-4e54-90e5-8dea525edf25', 'name': 'volume-1f3c6529-40e9-4e54-90e5-8dea525edf25', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '557dc011-44a1-4240-9596-d055d57e176f', 'attached_at': '', 'detached_at': '', 'volume_id': '1f3c6529-40e9-4e54-90e5-8dea525edf25', 'serial': '1f3c6529-40e9-4e54-90e5-8dea525edf25'} {{(pid=69367) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 896.279038] env[69367]: DEBUG nova.network.neutron [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Successfully created port: 64046d9a-7f38-4310-893f-f0a44a81b191 {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 896.358372] env[69367]: DEBUG nova.compute.manager [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Start building block device mappings for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 896.381722] env[69367]: DEBUG nova.compute.manager [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 896.580239] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] Acquiring lock "b2c2b5d8-70ae-4fda-9926-c673be42569b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 896.580239] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] Lock "b2c2b5d8-70ae-4fda-9926-c673be42569b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.643103] env[69367]: DEBUG nova.network.neutron [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Successfully created port: e8aa72cb-4b5c-4ac1-85d5-60be3f7372b2 {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 896.767293] env[69367]: DEBUG oslo_concurrency.lockutils [None req-245e1d58-8589-4012-b694-037de2371c9e tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Lock "67a3aaff-83ce-4c6c-af48-a3d4c52188cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.193s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.836082] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-892a2acd-c104-4355-bb19-ba875bf81a23 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.846603] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2881de66-f1b7-49d5-ba45-da5592055622 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.885444] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d88f031-011b-4fa3-825c-35750e509817 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.897509] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcb5abd6-28cc-40e9-a11d-16af3148ccf2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.911756] env[69367]: DEBUG nova.compute.provider_tree [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 896.914342] env[69367]: DEBUG oslo_concurrency.lockutils [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 
tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.088745] env[69367]: DEBUG nova.compute.manager [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 897.327937] env[69367]: DEBUG nova.objects.instance [None req-ffc35d42-3ee2-4ca8-9c3f-29373ee60f12 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lazy-loading 'flavor' on Instance uuid 557dc011-44a1-4240-9596-d055d57e176f {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 897.388162] env[69367]: DEBUG nova.compute.manager [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Start spawning the instance on the hypervisor. {{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 897.416254] env[69367]: DEBUG nova.scheduler.client.report [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 897.422523] env[69367]: DEBUG nova.virt.hardware [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 897.424129] env[69367]: DEBUG nova.virt.hardware [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 897.424129] env[69367]: DEBUG nova.virt.hardware [None 
req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 897.424129] env[69367]: DEBUG nova.virt.hardware [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 897.424129] env[69367]: DEBUG nova.virt.hardware [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 897.424129] env[69367]: DEBUG nova.virt.hardware [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 897.424129] env[69367]: DEBUG nova.virt.hardware [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 897.424129] env[69367]: DEBUG nova.virt.hardware [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 897.424129] env[69367]: DEBUG nova.virt.hardware [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 897.426125] env[69367]: DEBUG nova.virt.hardware [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 897.426410] env[69367]: DEBUG nova.virt.hardware [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 897.427713] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5add9f8-9354-4cfa-a73e-f73d41657936 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.438357] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eea5e89-6352-4d43-b20a-c558def068c9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.611626] env[69367]: DEBUG oslo_concurrency.lockutils [None 
req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.833049] env[69367]: DEBUG oslo_concurrency.lockutils [None req-ffc35d42-3ee2-4ca8-9c3f-29373ee60f12 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "557dc011-44a1-4240-9596-d055d57e176f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.324s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.933400] env[69367]: DEBUG oslo_concurrency.lockutils [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.589s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.934076] env[69367]: DEBUG nova.compute.manager [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Start building networks asynchronously for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 897.937270] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.591s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.937475] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.937649] env[69367]: INFO nova.compute.manager [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Successfully reverted task state from None on failure for instance. 
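The volume attach recorded above follows oslo.vmware's invoke-then-poll pattern: ReconfigVM_Task is invoked against the VM, and wait_for_task polls it (the "progress is N%" DEBUG lines) until vCenter reports completion. Below is a minimal sketch of that pattern, with a placeholder vCenter host, credentials, moref, and spec contents rather than anything taken from this log.

```python
# Minimal sketch of the invoke-then-poll pattern visible above
# (ReconfigVM_Task followed by wait_for_task / "progress is N%" polling).
# The vCenter host, credentials, moref value, and spec contents are
# placeholders for illustration, not values from this log.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')   # placeholder moref
spec = session.vim.client.factory.create('ns0:VirtualMachineConfigSpec')
# ... populate spec.deviceChange with the disk to attach ...

task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref, spec=spec)
session.wait_for_task(task)   # polls the task until it succeeds or raises
```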
[ 897.939818] env[69367]: DEBUG oslo_concurrency.lockutils [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.570s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.941412] env[69367]: INFO nova.compute.claims [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server [None req-9b5e4fb4-e357-4ec1-97d9-90f67718b83d tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server yield [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-822a58e9-b9b0-4a6e-a903-2c5a9b06e389"}]} [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 897.945231] 
env[69367]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 897.945231] env[69367]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 
897.947599] env[69367]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server raise value [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 897.947599] env[69367]: ERROR oslo_messaging.rpc.server [ 898.023519] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9dfba19d-5491-4354-b129-c9d6375e4a0a tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "557dc011-44a1-4240-9596-d055d57e176f" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.023794] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9dfba19d-5491-4354-b129-c9d6375e4a0a tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "557dc011-44a1-4240-9596-d055d57e176f" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.023980] env[69367]: DEBUG nova.compute.manager [None req-9dfba19d-5491-4354-b129-c9d6375e4a0a tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 898.024982] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0341d13-87c6-4342-8a3f-041ea4ac7b91 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.032536] env[69367]: DEBUG nova.compute.manager [None req-9dfba19d-5491-4354-b129-c9d6375e4a0a tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Stopping 
instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69367) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 898.033193] env[69367]: DEBUG nova.objects.instance [None req-9dfba19d-5491-4354-b129-c9d6375e4a0a tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lazy-loading 'flavor' on Instance uuid 557dc011-44a1-4240-9596-d055d57e176f {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 898.115225] env[69367]: DEBUG nova.compute.manager [req-3a7cbf77-861e-481f-b7c5-af09b91569ee req-b5d8ae61-1c6a-421a-8955-e4572cb2adca service nova] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Received event network-vif-plugged-64046d9a-7f38-4310-893f-f0a44a81b191 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 898.115540] env[69367]: DEBUG oslo_concurrency.lockutils [req-3a7cbf77-861e-481f-b7c5-af09b91569ee req-b5d8ae61-1c6a-421a-8955-e4572cb2adca service nova] Acquiring lock "652e2e23-7927-46ce-b8af-fffdb6ac8a3e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.115792] env[69367]: DEBUG oslo_concurrency.lockutils [req-3a7cbf77-861e-481f-b7c5-af09b91569ee req-b5d8ae61-1c6a-421a-8955-e4572cb2adca service nova] Lock "652e2e23-7927-46ce-b8af-fffdb6ac8a3e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.116000] env[69367]: DEBUG oslo_concurrency.lockutils [req-3a7cbf77-861e-481f-b7c5-af09b91569ee req-b5d8ae61-1c6a-421a-8955-e4572cb2adca service nova] Lock "652e2e23-7927-46ce-b8af-fffdb6ac8a3e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.116221] env[69367]: DEBUG nova.compute.manager [req-3a7cbf77-861e-481f-b7c5-af09b91569ee req-b5d8ae61-1c6a-421a-8955-e4572cb2adca service nova] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] No waiting events found dispatching network-vif-plugged-64046d9a-7f38-4310-893f-f0a44a81b191 {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 898.116510] env[69367]: WARNING nova.compute.manager [req-3a7cbf77-861e-481f-b7c5-af09b91569ee req-b5d8ae61-1c6a-421a-8955-e4572cb2adca service nova] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Received unexpected event network-vif-plugged-64046d9a-7f38-4310-893f-f0a44a81b191 for instance with vm_state building and task_state spawning. 
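The ResourceProviderSyncFailed traceback above bottoms out in a placement HTTP 400: the compute node's inventory update reports DISK_GB with max_unit = 0, while placement's schema requires max_unit >= 1, which is why the surrounding terminate/delete RPCs fail in the resource tracker rather than in anything instance-specific. Below is a sketch of the shape of the failing request; the placement endpoint, token, API version, and generation number are placeholders, while the provider UUID and the zero DISK_GB max_unit come from the log itself.

```python
# Sketch of the inventory update that placement rejects in the traceback above.
# Endpoint, token, API version, and generation are placeholders; the provider
# UUID and the offending DISK_GB max_unit=0 are taken from the log.
import requests

PLACEMENT = 'http://placement.example.org/placement'   # placeholder endpoint
RP_UUID = '19ddf8be-7305-4f70-8366-52a9957232e6'

payload = {
    'resource_provider_generation': 1,                  # placeholder value
    'inventories': {
        'VCPU': {'total': 48, 'max_unit': 16, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530},
        # The schema for each inventory record requires max_unit >= 1, so a
        # reported value of 0 is rejected with 400 before anything is saved.
        'DISK_GB': {'total': 400, 'max_unit': 0},
    },
}

resp = requests.put(
    f'{PLACEMENT}/resource_providers/{RP_UUID}/inventories',
    json=payload,
    headers={'X-Auth-Token': 'placeholder-token',
             'OpenStack-API-Version': 'placement 1.39'})
print(resp.status_code, resp.reason)   # expect 400 Bad Request, as in the log
```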
[ 898.205112] env[69367]: DEBUG nova.network.neutron [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Successfully updated port: 64046d9a-7f38-4310-893f-f0a44a81b191 {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 898.328050] env[69367]: DEBUG oslo_concurrency.lockutils [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Acquiring lock "dd8a6c15-b61f-43bd-97e3-bf67853594b5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.328050] env[69367]: DEBUG oslo_concurrency.lockutils [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Lock "dd8a6c15-b61f-43bd-97e3-bf67853594b5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.446373] env[69367]: DEBUG nova.compute.utils [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 898.449773] env[69367]: DEBUG nova.compute.manager [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Allocating IP information in the background. {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 898.449978] env[69367]: DEBUG nova.network.neutron [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 898.498611] env[69367]: DEBUG nova.policy [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c83821821cf4e3ca37f10335e896027', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '23ccf6298872456c9b884fb77954f9d7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 898.830652] env[69367]: DEBUG nova.compute.manager [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 898.840027] env[69367]: DEBUG nova.network.neutron [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Successfully created port: 05366f50-f66d-46a5-8e2c-f3c687488099 {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 898.953506] env[69367]: DEBUG nova.compute.manager [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Start building block device mappings for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 899.039608] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dfba19d-5491-4354-b129-c9d6375e4a0a tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 899.042900] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4bd22582-8150-4964-bcb3-6fc98bb42c49 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.052390] env[69367]: DEBUG oslo_vmware.api [None req-9dfba19d-5491-4354-b129-c9d6375e4a0a tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 899.052390] env[69367]: value = "task-4234117" [ 899.052390] env[69367]: _type = "Task" [ 899.052390] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.067170] env[69367]: DEBUG oslo_vmware.api [None req-9dfba19d-5491-4354-b129-c9d6375e4a0a tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234117, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.355746] env[69367]: DEBUG oslo_concurrency.lockutils [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 899.425551] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd372ad2-d0a5-4229-beeb-46066899c08e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.433605] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a25d9d3f-6dd0-4e19-b179-bc36baf1d697 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.468155] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec907303-ea29-42a9-b5f7-2de6e8a2893a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.476599] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-986bd420-b438-4164-bc52-810b94a428e8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.491024] env[69367]: DEBUG nova.compute.provider_tree [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 899.562884] env[69367]: DEBUG oslo_vmware.api [None req-9dfba19d-5491-4354-b129-c9d6375e4a0a tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234117, 'name': PowerOffVM_Task, 'duration_secs': 0.199734} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.563203] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dfba19d-5491-4354-b129-c9d6375e4a0a tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 899.563422] env[69367]: DEBUG nova.compute.manager [None req-9dfba19d-5491-4354-b129-c9d6375e4a0a tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 899.564241] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b0da84-594c-4978-bf10-50cdc5eecf8b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.972815] env[69367]: DEBUG nova.compute.manager [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Start spawning the instance on the hypervisor. {{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 899.994755] env[69367]: DEBUG nova.scheduler.client.report [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 900.000270] env[69367]: DEBUG nova.virt.hardware [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 900.000548] env[69367]: DEBUG nova.virt.hardware [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 
900.000686] env[69367]: DEBUG nova.virt.hardware [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 900.000871] env[69367]: DEBUG nova.virt.hardware [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 900.001032] env[69367]: DEBUG nova.virt.hardware [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 900.001187] env[69367]: DEBUG nova.virt.hardware [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 900.001409] env[69367]: DEBUG nova.virt.hardware [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 900.001577] env[69367]: DEBUG nova.virt.hardware [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 900.001745] env[69367]: DEBUG nova.virt.hardware [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 900.001915] env[69367]: DEBUG nova.virt.hardware [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 900.002112] env[69367]: DEBUG nova.virt.hardware [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 900.003095] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18df433e-bbb7-4bf7-9ea7-aa96be13bfcf {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.013424] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1581864d-4ddd-4a2b-b972-91aa30dfb884 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.076784] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9dfba19d-5491-4354-b129-c9d6375e4a0a 
tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "557dc011-44a1-4240-9596-d055d57e176f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.053s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 900.160615] env[69367]: DEBUG nova.compute.manager [req-c6a900c7-e0b2-46ae-8b78-df9b53fb4351 req-ed48dbf2-26a0-4fc4-a929-50fb75db3324 service nova] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Received event network-changed-64046d9a-7f38-4310-893f-f0a44a81b191 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 900.160913] env[69367]: DEBUG nova.compute.manager [req-c6a900c7-e0b2-46ae-8b78-df9b53fb4351 req-ed48dbf2-26a0-4fc4-a929-50fb75db3324 service nova] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Refreshing instance network info cache due to event network-changed-64046d9a-7f38-4310-893f-f0a44a81b191. {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 900.161169] env[69367]: DEBUG oslo_concurrency.lockutils [req-c6a900c7-e0b2-46ae-8b78-df9b53fb4351 req-ed48dbf2-26a0-4fc4-a929-50fb75db3324 service nova] Acquiring lock "refresh_cache-652e2e23-7927-46ce-b8af-fffdb6ac8a3e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.161324] env[69367]: DEBUG oslo_concurrency.lockutils [req-c6a900c7-e0b2-46ae-8b78-df9b53fb4351 req-ed48dbf2-26a0-4fc4-a929-50fb75db3324 service nova] Acquired lock "refresh_cache-652e2e23-7927-46ce-b8af-fffdb6ac8a3e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 900.161488] env[69367]: DEBUG nova.network.neutron [req-c6a900c7-e0b2-46ae-8b78-df9b53fb4351 req-ed48dbf2-26a0-4fc4-a929-50fb75db3324 service nova] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Refreshing network info cache for port 64046d9a-7f38-4310-893f-f0a44a81b191 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 900.458601] env[69367]: DEBUG nova.compute.manager [req-1ec1a005-e693-4a6d-89da-3a446f20272d req-a218ff31-f862-4db2-8321-c8e0b03a7aa6 service nova] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Received event network-vif-plugged-e8aa72cb-4b5c-4ac1-85d5-60be3f7372b2 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 900.458890] env[69367]: DEBUG oslo_concurrency.lockutils [req-1ec1a005-e693-4a6d-89da-3a446f20272d req-a218ff31-f862-4db2-8321-c8e0b03a7aa6 service nova] Acquiring lock "652e2e23-7927-46ce-b8af-fffdb6ac8a3e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 900.459059] env[69367]: DEBUG oslo_concurrency.lockutils [req-1ec1a005-e693-4a6d-89da-3a446f20272d req-a218ff31-f862-4db2-8321-c8e0b03a7aa6 service nova] Lock "652e2e23-7927-46ce-b8af-fffdb6ac8a3e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.459302] env[69367]: DEBUG oslo_concurrency.lockutils [req-1ec1a005-e693-4a6d-89da-3a446f20272d req-a218ff31-f862-4db2-8321-c8e0b03a7aa6 service nova] Lock "652e2e23-7927-46ce-b8af-fffdb6ac8a3e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" 
:: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 900.459508] env[69367]: DEBUG nova.compute.manager [req-1ec1a005-e693-4a6d-89da-3a446f20272d req-a218ff31-f862-4db2-8321-c8e0b03a7aa6 service nova] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] No waiting events found dispatching network-vif-plugged-e8aa72cb-4b5c-4ac1-85d5-60be3f7372b2 {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 900.459678] env[69367]: WARNING nova.compute.manager [req-1ec1a005-e693-4a6d-89da-3a446f20272d req-a218ff31-f862-4db2-8321-c8e0b03a7aa6 service nova] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Received unexpected event network-vif-plugged-e8aa72cb-4b5c-4ac1-85d5-60be3f7372b2 for instance with vm_state building and task_state spawning. [ 900.507738] env[69367]: DEBUG oslo_concurrency.lockutils [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.568s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 900.508404] env[69367]: DEBUG nova.compute.manager [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Start building networks asynchronously for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 900.514122] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.595s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.514359] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 900.514597] env[69367]: INFO nova.compute.manager [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] [instance: c272b0ae-6313-46ab-977c-6de255e77675] Successfully reverted task state from None on failure for instance. 
[ 900.516892] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5e205b40-cfcf-4563-9c86-1720cbb819cc tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.324s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.517755] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5e205b40-cfcf-4563-9c86-1720cbb819cc tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 900.520104] env[69367]: DEBUG oslo_concurrency.lockutils [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.067s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.520337] env[69367]: DEBUG nova.objects.instance [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lazy-loading 'resources' on Instance uuid 54a1f586-481d-427e-ba0b-be90e5573bd3 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server [None req-e35df00c-08dc-425a-a9e2-95b6e24a7398 tempest-ServersListShow298Test-1666893661 tempest-ServersListShow298Test-1666893661-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server yield [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-817b52a6-ba9b-4045-8df8-f8cd2f797b4d"}]} [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 900.522306] env[69367]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 
900.523523] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server raise value [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server File 
"/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 900.523523] env[69367]: ERROR oslo_messaging.rpc.server [ 900.533192] env[69367]: DEBUG nova.network.neutron [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Successfully updated port: 05366f50-f66d-46a5-8e2c-f3c687488099 {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 900.554288] env[69367]: INFO nova.scheduler.client.report [None req-5e205b40-cfcf-4563-9c86-1720cbb819cc tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Deleted allocations for instance 4a46d003-f57e-4089-aa60-757a4246f071 [ 900.575862] env[69367]: DEBUG nova.network.neutron [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Successfully updated port: e8aa72cb-4b5c-4ac1-85d5-60be3f7372b2 {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 900.642058] env[69367]: DEBUG nova.objects.instance [None req-da296458-c371-45ea-93fe-93d2a9f76a86 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lazy-loading 'flavor' on Instance uuid 557dc011-44a1-4240-9596-d055d57e176f {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 900.697234] env[69367]: DEBUG nova.network.neutron [req-c6a900c7-e0b2-46ae-8b78-df9b53fb4351 req-ed48dbf2-26a0-4fc4-a929-50fb75db3324 service nova] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 900.770664] env[69367]: DEBUG nova.network.neutron [req-c6a900c7-e0b2-46ae-8b78-df9b53fb4351 req-ed48dbf2-26a0-4fc4-a929-50fb75db3324 service nova] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.024921] env[69367]: DEBUG nova.compute.utils [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 901.029359] env[69367]: DEBUG nova.compute.manager [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Allocating IP information in the background. 
{{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 901.029617] env[69367]: DEBUG nova.network.neutron [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 901.037138] env[69367]: DEBUG oslo_concurrency.lockutils [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "refresh_cache-f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.037138] env[69367]: DEBUG oslo_concurrency.lockutils [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquired lock "refresh_cache-f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 901.037138] env[69367]: DEBUG nova.network.neutron [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 901.064093] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5e205b40-cfcf-4563-9c86-1720cbb819cc tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "4a46d003-f57e-4089-aa60-757a4246f071" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.113s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 901.070410] env[69367]: DEBUG nova.policy [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5360c8b93a954bd0832ebadea6983ef1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f5bc3d470905412ea72a8eedb98e9e47', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 901.078998] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquiring lock "refresh_cache-652e2e23-7927-46ce-b8af-fffdb6ac8a3e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.148644] env[69367]: DEBUG oslo_concurrency.lockutils [None req-da296458-c371-45ea-93fe-93d2a9f76a86 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "refresh_cache-557dc011-44a1-4240-9596-d055d57e176f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.148847] env[69367]: DEBUG oslo_concurrency.lockutils 
[None req-da296458-c371-45ea-93fe-93d2a9f76a86 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquired lock "refresh_cache-557dc011-44a1-4240-9596-d055d57e176f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 901.149041] env[69367]: DEBUG nova.network.neutron [None req-da296458-c371-45ea-93fe-93d2a9f76a86 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 901.149250] env[69367]: DEBUG nova.objects.instance [None req-da296458-c371-45ea-93fe-93d2a9f76a86 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lazy-loading 'info_cache' on Instance uuid 557dc011-44a1-4240-9596-d055d57e176f {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 901.275688] env[69367]: DEBUG oslo_concurrency.lockutils [req-c6a900c7-e0b2-46ae-8b78-df9b53fb4351 req-ed48dbf2-26a0-4fc4-a929-50fb75db3324 service nova] Releasing lock "refresh_cache-652e2e23-7927-46ce-b8af-fffdb6ac8a3e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 901.276316] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquired lock "refresh_cache-652e2e23-7927-46ce-b8af-fffdb6ac8a3e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 901.276508] env[69367]: DEBUG nova.network.neutron [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 901.431542] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04885838-f059-49aa-89f1-529199a8fdcb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.442577] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-003e055f-ad18-41b8-b7af-d27e29631fbe {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.475660] env[69367]: DEBUG nova.network.neutron [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Successfully created port: 1daae92e-1898-467c-be43-e8f27bff4242 {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 901.480649] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f0db06f-d607-4e11-812c-17d4a89754be {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.486494] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa7d2793-00ec-48ce-a828-f5779d1860a7 {{(pid=69367) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.501716] env[69367]: DEBUG nova.compute.provider_tree [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 901.529757] env[69367]: DEBUG nova.compute.manager [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Start building block device mappings for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 901.577902] env[69367]: DEBUG nova.network.neutron [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 901.653226] env[69367]: DEBUG nova.objects.base [None req-da296458-c371-45ea-93fe-93d2a9f76a86 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Object Instance<557dc011-44a1-4240-9596-d055d57e176f> lazy-loaded attributes: flavor,info_cache {{(pid=69367) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 901.739177] env[69367]: DEBUG nova.network.neutron [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Updating instance_info_cache with network_info: [{"id": "05366f50-f66d-46a5-8e2c-f3c687488099", "address": "fa:16:3e:c4:18:a6", "network": {"id": "e86884a5-cadf-4d85-a52e-08c52cce43d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-944567210-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23ccf6298872456c9b884fb77954f9d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05366f50-f6", "ovs_interfaceid": "05366f50-f66d-46a5-8e2c-f3c687488099", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.813815] env[69367]: DEBUG nova.network.neutron [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 902.005052] env[69367]: DEBUG nova.scheduler.client.report [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 902.084461] env[69367]: DEBUG nova.network.neutron [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Updating instance_info_cache with network_info: [{"id": "64046d9a-7f38-4310-893f-f0a44a81b191", "address": "fa:16:3e:e5:e3:ef", "network": {"id": "9cd5f031-5f72-4a59-a9a7-e6b8fec10959", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-645637051", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.154", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68ad9e06b1fb4e5bbad98a14e0c55c60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64046d9a-7f", "ovs_interfaceid": "64046d9a-7f38-4310-893f-f0a44a81b191", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e8aa72cb-4b5c-4ac1-85d5-60be3f7372b2", "address": "fa:16:3e:7b:44:36", "network": {"id": "5f3745be-c047-4b85-9371-a372c6cd2521", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-78713587", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.148", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "68ad9e06b1fb4e5bbad98a14e0c55c60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a92a4ffe-7939-4697-bf98-5b22e2c7feda", "external-id": "nsx-vlan-transportzone-732", "segmentation_id": 732, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8aa72cb-4b", "ovs_interfaceid": "e8aa72cb-4b5c-4ac1-85d5-60be3f7372b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 902.196019] env[69367]: DEBUG nova.compute.manager [req-b76b1679-da6d-44d3-9912-82a0f38b6b94 req-04e13eaa-9ef6-48c2-b780-822ab3b90b5e service nova] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Received event network-vif-plugged-05366f50-f66d-46a5-8e2c-f3c687488099 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 902.196112] env[69367]: DEBUG oslo_concurrency.lockutils [req-b76b1679-da6d-44d3-9912-82a0f38b6b94 req-04e13eaa-9ef6-48c2-b780-822ab3b90b5e service nova] Acquiring lock "f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.196373] env[69367]: DEBUG oslo_concurrency.lockutils [req-b76b1679-da6d-44d3-9912-82a0f38b6b94 req-04e13eaa-9ef6-48c2-b780-822ab3b90b5e service nova] Lock "f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.196529] env[69367]: DEBUG oslo_concurrency.lockutils [req-b76b1679-da6d-44d3-9912-82a0f38b6b94 req-04e13eaa-9ef6-48c2-b780-822ab3b90b5e service nova] Lock "f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.196709] env[69367]: DEBUG nova.compute.manager [req-b76b1679-da6d-44d3-9912-82a0f38b6b94 req-04e13eaa-9ef6-48c2-b780-822ab3b90b5e service nova] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] No waiting events found dispatching network-vif-plugged-05366f50-f66d-46a5-8e2c-f3c687488099 {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 902.196893] env[69367]: WARNING nova.compute.manager [req-b76b1679-da6d-44d3-9912-82a0f38b6b94 req-04e13eaa-9ef6-48c2-b780-822ab3b90b5e service nova] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Received unexpected event network-vif-plugged-05366f50-f66d-46a5-8e2c-f3c687488099 for instance with vm_state building and task_state spawning. [ 902.197088] env[69367]: DEBUG nova.compute.manager [req-b76b1679-da6d-44d3-9912-82a0f38b6b94 req-04e13eaa-9ef6-48c2-b780-822ab3b90b5e service nova] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Received event network-changed-05366f50-f66d-46a5-8e2c-f3c687488099 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 902.197239] env[69367]: DEBUG nova.compute.manager [req-b76b1679-da6d-44d3-9912-82a0f38b6b94 req-04e13eaa-9ef6-48c2-b780-822ab3b90b5e service nova] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Refreshing instance network info cache due to event network-changed-05366f50-f66d-46a5-8e2c-f3c687488099. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 902.197434] env[69367]: DEBUG oslo_concurrency.lockutils [req-b76b1679-da6d-44d3-9912-82a0f38b6b94 req-04e13eaa-9ef6-48c2-b780-822ab3b90b5e service nova] Acquiring lock "refresh_cache-f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.242593] env[69367]: DEBUG oslo_concurrency.lockutils [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Releasing lock "refresh_cache-f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 902.242983] env[69367]: DEBUG nova.compute.manager [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Instance network_info: |[{"id": "05366f50-f66d-46a5-8e2c-f3c687488099", "address": "fa:16:3e:c4:18:a6", "network": {"id": "e86884a5-cadf-4d85-a52e-08c52cce43d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-944567210-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23ccf6298872456c9b884fb77954f9d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05366f50-f6", "ovs_interfaceid": "05366f50-f66d-46a5-8e2c-f3c687488099", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 902.243352] env[69367]: DEBUG oslo_concurrency.lockutils [req-b76b1679-da6d-44d3-9912-82a0f38b6b94 req-04e13eaa-9ef6-48c2-b780-822ab3b90b5e service nova] Acquired lock "refresh_cache-f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 902.243539] env[69367]: DEBUG nova.network.neutron [req-b76b1679-da6d-44d3-9912-82a0f38b6b94 req-04e13eaa-9ef6-48c2-b780-822ab3b90b5e service nova] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Refreshing network info cache for port 05366f50-f66d-46a5-8e2c-f3c687488099 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 902.245173] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c4:18:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dba18786-598d-4e06-96db-b3dc1717530f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '05366f50-f66d-46a5-8e2c-f3c687488099', 'vif_model': 'vmxnet3'}] {{(pid=69367) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 902.252416] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Creating folder: Project (23ccf6298872456c9b884fb77954f9d7). Parent ref: group-v837645. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 902.255527] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c8b28891-c2f6-4dcf-95db-31a2d2299d91 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.269203] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Created folder: Project (23ccf6298872456c9b884fb77954f9d7) in parent group-v837645. [ 902.269447] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Creating folder: Instances. Parent ref: group-v837753. {{(pid=69367) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 902.269693] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-99cbee39-f4aa-423f-bab6-61d1efb188ef {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.279433] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Created folder: Instances in parent group-v837753. [ 902.279704] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 902.279906] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 902.280125] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ab1f97d2-9074-4740-ade5-45d8448db1ad {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.300161] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 902.300161] env[69367]: value = "task-4234120" [ 902.300161] env[69367]: _type = "Task" [ 902.300161] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.308314] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234120, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.414035] env[69367]: DEBUG nova.network.neutron [None req-da296458-c371-45ea-93fe-93d2a9f76a86 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Updating instance_info_cache with network_info: [{"id": "cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1", "address": "fa:16:3e:ff:f7:e3", "network": {"id": "0bfdc337-bb57-4c33-9907-9098384ed460", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1159515822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd27807405a646e989b95325358a87eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb95db9d-92", "ovs_interfaceid": "cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.512967] env[69367]: DEBUG oslo_concurrency.lockutils [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.993s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.515841] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.048s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.515841] env[69367]: DEBUG nova.objects.instance [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lazy-loading 'resources' on Instance uuid 46b6bc45-57f0-4850-9249-6bbb22b162c6 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 902.539022] env[69367]: INFO nova.scheduler.client.report [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Deleted allocations for instance 54a1f586-481d-427e-ba0b-be90e5573bd3 [ 902.540422] env[69367]: DEBUG nova.compute.manager [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Start spawning the instance on 
the hypervisor. {{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 902.578812] env[69367]: DEBUG nova.virt.hardware [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 902.579264] env[69367]: DEBUG nova.virt.hardware [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 902.579264] env[69367]: DEBUG nova.virt.hardware [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 902.579477] env[69367]: DEBUG nova.virt.hardware [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 902.579731] env[69367]: DEBUG nova.virt.hardware [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 902.579875] env[69367]: DEBUG nova.virt.hardware [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 902.580126] env[69367]: DEBUG nova.virt.hardware [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 902.580319] env[69367]: DEBUG nova.virt.hardware [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
902.580500] env[69367]: DEBUG nova.virt.hardware [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 902.580773] env[69367]: DEBUG nova.virt.hardware [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 902.580995] env[69367]: DEBUG nova.virt.hardware [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 902.581962] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f50197af-0ff0-4561-bdc6-cc0733e11461 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.587390] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Releasing lock "refresh_cache-652e2e23-7927-46ce-b8af-fffdb6ac8a3e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 902.587917] env[69367]: DEBUG nova.compute.manager [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Instance network_info: |[{"id": "64046d9a-7f38-4310-893f-f0a44a81b191", "address": "fa:16:3e:e5:e3:ef", "network": {"id": "9cd5f031-5f72-4a59-a9a7-e6b8fec10959", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-645637051", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.154", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68ad9e06b1fb4e5bbad98a14e0c55c60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64046d9a-7f", "ovs_interfaceid": "64046d9a-7f38-4310-893f-f0a44a81b191", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e8aa72cb-4b5c-4ac1-85d5-60be3f7372b2", "address": "fa:16:3e:7b:44:36", "network": {"id": "5f3745be-c047-4b85-9371-a372c6cd2521", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-78713587", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.148", "type": "fixed", "version": 4, "meta": {}, "floating_ips": 
[]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "68ad9e06b1fb4e5bbad98a14e0c55c60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a92a4ffe-7939-4697-bf98-5b22e2c7feda", "external-id": "nsx-vlan-transportzone-732", "segmentation_id": 732, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8aa72cb-4b", "ovs_interfaceid": "e8aa72cb-4b5c-4ac1-85d5-60be3f7372b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 902.592051] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:e3:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3d2e4070-a78e-4d08-a104-b6312ab65577', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '64046d9a-7f38-4310-893f-f0a44a81b191', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:44:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a92a4ffe-7939-4697-bf98-5b22e2c7feda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e8aa72cb-4b5c-4ac1-85d5-60be3f7372b2', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 902.602736] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 902.604872] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 902.606151] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8465abf1-9c8a-43d5-869f-a5a291169151 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.612221] env[69367]: DEBUG nova.compute.manager [req-f6eb513d-7d17-4125-8839-46bcfc058d78 req-4a6b6b37-105d-4f21-9e35-9633b7f9d06e service nova] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Received event network-changed-e8aa72cb-4b5c-4ac1-85d5-60be3f7372b2 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 902.612486] env[69367]: DEBUG nova.compute.manager [req-f6eb513d-7d17-4125-8839-46bcfc058d78 req-4a6b6b37-105d-4f21-9e35-9633b7f9d06e service nova] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Refreshing instance network info cache due to event network-changed-e8aa72cb-4b5c-4ac1-85d5-60be3f7372b2. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 902.612808] env[69367]: DEBUG oslo_concurrency.lockutils [req-f6eb513d-7d17-4125-8839-46bcfc058d78 req-4a6b6b37-105d-4f21-9e35-9633b7f9d06e service nova] Acquiring lock "refresh_cache-652e2e23-7927-46ce-b8af-fffdb6ac8a3e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.613026] env[69367]: DEBUG oslo_concurrency.lockutils [req-f6eb513d-7d17-4125-8839-46bcfc058d78 req-4a6b6b37-105d-4f21-9e35-9633b7f9d06e service nova] Acquired lock "refresh_cache-652e2e23-7927-46ce-b8af-fffdb6ac8a3e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 902.613315] env[69367]: DEBUG nova.network.neutron [req-f6eb513d-7d17-4125-8839-46bcfc058d78 req-4a6b6b37-105d-4f21-9e35-9633b7f9d06e service nova] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Refreshing network info cache for port e8aa72cb-4b5c-4ac1-85d5-60be3f7372b2 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 902.614673] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-48c07cdd-6b3f-47a4-9b53-e05a9e8f12e4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.651191] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 902.651191] env[69367]: value = "task-4234121" [ 902.651191] env[69367]: _type = "Task" [ 902.651191] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.662029] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234121, 'name': CreateVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.811067] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234120, 'name': CreateVM_Task, 'duration_secs': 0.346427} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.811230] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 902.811912] env[69367]: DEBUG oslo_concurrency.lockutils [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.812084] env[69367]: DEBUG oslo_concurrency.lockutils [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 902.812452] env[69367]: DEBUG oslo_concurrency.lockutils [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 902.812732] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa662fef-88d4-4216-8f37-ccb8a693c11a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.822072] env[69367]: DEBUG oslo_vmware.api [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Waiting for the task: (returnval){ [ 902.822072] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52a58240-414f-8b8c-628b-cdd1d67017e8" [ 902.822072] env[69367]: _type = "Task" [ 902.822072] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.829955] env[69367]: DEBUG oslo_vmware.api [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52a58240-414f-8b8c-628b-cdd1d67017e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.888304] env[69367]: DEBUG nova.network.neutron [req-f6eb513d-7d17-4125-8839-46bcfc058d78 req-4a6b6b37-105d-4f21-9e35-9633b7f9d06e service nova] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Updated VIF entry in instance network info cache for port e8aa72cb-4b5c-4ac1-85d5-60be3f7372b2. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 902.888923] env[69367]: DEBUG nova.network.neutron [req-f6eb513d-7d17-4125-8839-46bcfc058d78 req-4a6b6b37-105d-4f21-9e35-9633b7f9d06e service nova] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Updating instance_info_cache with network_info: [{"id": "64046d9a-7f38-4310-893f-f0a44a81b191", "address": "fa:16:3e:e5:e3:ef", "network": {"id": "9cd5f031-5f72-4a59-a9a7-e6b8fec10959", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-645637051", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.154", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68ad9e06b1fb4e5bbad98a14e0c55c60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64046d9a-7f", "ovs_interfaceid": "64046d9a-7f38-4310-893f-f0a44a81b191", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e8aa72cb-4b5c-4ac1-85d5-60be3f7372b2", "address": "fa:16:3e:7b:44:36", "network": {"id": "5f3745be-c047-4b85-9371-a372c6cd2521", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-78713587", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.148", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "68ad9e06b1fb4e5bbad98a14e0c55c60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a92a4ffe-7939-4697-bf98-5b22e2c7feda", "external-id": "nsx-vlan-transportzone-732", "segmentation_id": 732, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8aa72cb-4b", "ovs_interfaceid": "e8aa72cb-4b5c-4ac1-85d5-60be3f7372b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.917211] env[69367]: DEBUG oslo_concurrency.lockutils [None req-da296458-c371-45ea-93fe-93d2a9f76a86 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Releasing lock "refresh_cache-557dc011-44a1-4240-9596-d055d57e176f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 903.053549] env[69367]: DEBUG oslo_concurrency.lockutils [None req-775ed4e5-8bd2-4d2c-8d8d-3e33cb192c86 tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "54a1f586-481d-427e-ba0b-be90e5573bd3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.979s {{(pid=69367) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 903.116083] env[69367]: DEBUG nova.network.neutron [req-b76b1679-da6d-44d3-9912-82a0f38b6b94 req-04e13eaa-9ef6-48c2-b780-822ab3b90b5e service nova] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Updated VIF entry in instance network info cache for port 05366f50-f66d-46a5-8e2c-f3c687488099. {{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 903.116432] env[69367]: DEBUG nova.network.neutron [req-b76b1679-da6d-44d3-9912-82a0f38b6b94 req-04e13eaa-9ef6-48c2-b780-822ab3b90b5e service nova] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Updating instance_info_cache with network_info: [{"id": "05366f50-f66d-46a5-8e2c-f3c687488099", "address": "fa:16:3e:c4:18:a6", "network": {"id": "e86884a5-cadf-4d85-a52e-08c52cce43d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-944567210-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23ccf6298872456c9b884fb77954f9d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05366f50-f6", "ovs_interfaceid": "05366f50-f66d-46a5-8e2c-f3c687488099", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.165734] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234121, 'name': CreateVM_Task, 'duration_secs': 0.38089} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.165734] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 903.166612] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.257310] env[69367]: DEBUG nova.network.neutron [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Successfully updated port: 1daae92e-1898-467c-be43-e8f27bff4242 {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 903.335736] env[69367]: DEBUG oslo_vmware.api [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52a58240-414f-8b8c-628b-cdd1d67017e8, 'name': SearchDatastore_Task, 'duration_secs': 0.010162} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.336046] env[69367]: DEBUG oslo_concurrency.lockutils [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 903.336284] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 903.336523] env[69367]: DEBUG oslo_concurrency.lockutils [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.336711] env[69367]: DEBUG oslo_concurrency.lockutils [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 903.336901] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 903.339728] env[69367]: DEBUG 
oslo_concurrency.lockutils [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 903.340037] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 903.340273] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-889dd94f-a7d0-4d27-b2e2-3d88799ef4bd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.342724] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea6e5053-bef2-4d4f-acce-a8cd9d8421de {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.348664] env[69367]: DEBUG oslo_vmware.api [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Waiting for the task: (returnval){ [ 903.348664] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]529fb13c-e8c0-4c4c-f7ab-e89b3a16ee71" [ 903.348664] env[69367]: _type = "Task" [ 903.348664] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.354661] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 903.354844] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 903.356024] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c699a90-a0d7-4f45-8ff5-bceb423503bd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.361428] env[69367]: DEBUG oslo_vmware.api [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]529fb13c-e8c0-4c4c-f7ab-e89b3a16ee71, 'name': SearchDatastore_Task, 'duration_secs': 0.008846} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.361998] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 903.362269] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 903.362503] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.364509] env[69367]: DEBUG oslo_vmware.api [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Waiting for the task: (returnval){ [ 903.364509] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]529a05fe-c01d-3ad3-4704-6e5d5bedb590" [ 903.364509] env[69367]: _type = "Task" [ 903.364509] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.374574] env[69367]: DEBUG oslo_vmware.api [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]529a05fe-c01d-3ad3-4704-6e5d5bedb590, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.392636] env[69367]: DEBUG oslo_concurrency.lockutils [req-f6eb513d-7d17-4125-8839-46bcfc058d78 req-4a6b6b37-105d-4f21-9e35-9633b7f9d06e service nova] Releasing lock "refresh_cache-652e2e23-7927-46ce-b8af-fffdb6ac8a3e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 903.407401] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a4fd745-98e8-435b-873f-845604e0b8b6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.414885] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fffcca0-cdd3-4bd2-8e3e-7e23d8c9b047 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.447922] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-da296458-c371-45ea-93fe-93d2a9f76a86 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 903.448671] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e291babf-eabc-4f47-9761-1d75afb2549e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.451407] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4e61467f-99d0-496f-b672-83e279e6d649 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.459544] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae06865a-a2e4-46c7-9365-44ea12012ba3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.463296] env[69367]: DEBUG oslo_vmware.api [None req-da296458-c371-45ea-93fe-93d2a9f76a86 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 903.463296] env[69367]: value = "task-4234122" [ 903.463296] env[69367]: _type = "Task" [ 903.463296] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.474600] env[69367]: DEBUG nova.compute.provider_tree [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 903.480640] env[69367]: DEBUG oslo_vmware.api [None req-da296458-c371-45ea-93fe-93d2a9f76a86 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234122, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.619542] env[69367]: DEBUG oslo_concurrency.lockutils [req-b76b1679-da6d-44d3-9912-82a0f38b6b94 req-04e13eaa-9ef6-48c2-b780-822ab3b90b5e service nova] Releasing lock "refresh_cache-f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 903.763666] env[69367]: DEBUG oslo_concurrency.lockutils [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "refresh_cache-42db60d9-e5f7-4925-8f6f-d3884687414a" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.763797] env[69367]: DEBUG oslo_concurrency.lockutils [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquired lock "refresh_cache-42db60d9-e5f7-4925-8f6f-d3884687414a" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 903.763955] env[69367]: DEBUG nova.network.neutron [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 903.884975] env[69367]: DEBUG oslo_vmware.api [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]529a05fe-c01d-3ad3-4704-6e5d5bedb590, 'name': SearchDatastore_Task, 'duration_secs': 0.009338} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.886189] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-050bf567-2d29-400b-a613-060617cdb5e9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.894693] env[69367]: DEBUG oslo_vmware.api [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Waiting for the task: (returnval){ [ 903.894693] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52adf7bb-eb5c-0282-f19e-83e1d9665118" [ 903.894693] env[69367]: _type = "Task" [ 903.894693] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.906346] env[69367]: DEBUG oslo_vmware.api [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52adf7bb-eb5c-0282-f19e-83e1d9665118, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.974080] env[69367]: DEBUG oslo_vmware.api [None req-da296458-c371-45ea-93fe-93d2a9f76a86 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234122, 'name': PowerOnVM_Task, 'duration_secs': 0.428908} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.974386] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-da296458-c371-45ea-93fe-93d2a9f76a86 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 903.974649] env[69367]: DEBUG nova.compute.manager [None req-da296458-c371-45ea-93fe-93d2a9f76a86 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 903.975475] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d3d197-325b-49df-aa29-4e460e10d4f8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.978694] env[69367]: DEBUG nova.scheduler.client.report [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 904.306649] env[69367]: DEBUG nova.network.neutron [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 904.407268] env[69367]: DEBUG oslo_vmware.api [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52adf7bb-eb5c-0282-f19e-83e1d9665118, 'name': SearchDatastore_Task, 'duration_secs': 0.014112} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.411219] env[69367]: DEBUG oslo_concurrency.lockutils [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 904.411755] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3/f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 904.412145] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 904.412484] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 904.412826] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-994474de-7f5d-4fd8-9bc5-de08013c3a2a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.416269] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f4fae53-b036-4fcf-b98e-c31c322c460e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.425776] env[69367]: DEBUG oslo_vmware.api [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Waiting for the task: (returnval){ [ 904.425776] env[69367]: value = "task-4234123" [ 904.425776] env[69367]: _type = "Task" [ 904.425776] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.432624] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 904.432943] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 904.434117] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b658003-1f3b-4338-8893-778717e54ab7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.445632] env[69367]: DEBUG oslo_vmware.api [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Task: {'id': task-4234123, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.447596] env[69367]: DEBUG oslo_vmware.api [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Waiting for the task: (returnval){ [ 904.447596] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]5229b58d-68d0-0657-271d-bd3982b6be42" [ 904.447596] env[69367]: _type = "Task" [ 904.447596] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.453657] env[69367]: DEBUG nova.network.neutron [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Updating instance_info_cache with network_info: [{"id": "1daae92e-1898-467c-be43-e8f27bff4242", "address": "fa:16:3e:b1:9b:f0", "network": {"id": "a8e5c1a6-2526-4042-8a8d-fdb54dbe7bf9", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1429177141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5bc3d470905412ea72a8eedb98e9e47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1daae92e-18", "ovs_interfaceid": "1daae92e-1898-467c-be43-e8f27bff4242", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.461327] env[69367]: DEBUG oslo_vmware.api [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5229b58d-68d0-0657-271d-bd3982b6be42, 'name': SearchDatastore_Task, 'duration_secs': 0.00905} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.462106] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73c6d57a-23b0-4a2d-a0aa-5d614fc786af {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.469054] env[69367]: DEBUG oslo_vmware.api [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Waiting for the task: (returnval){ [ 904.469054] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52aa1549-246a-8b67-48d3-fc2e754b437c" [ 904.469054] env[69367]: _type = "Task" [ 904.469054] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.479055] env[69367]: DEBUG oslo_vmware.api [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52aa1549-246a-8b67-48d3-fc2e754b437c, 'name': SearchDatastore_Task, 'duration_secs': 0.009039} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.479315] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 904.479609] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 652e2e23-7927-46ce-b8af-fffdb6ac8a3e/652e2e23-7927-46ce-b8af-fffdb6ac8a3e.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 904.480127] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-925ad2af-6798-4003-a8ca-5e01097c4464 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.484523] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.969s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.488216] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.390s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.488216] env[69367]: DEBUG nova.objects.instance [None 
req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Lazy-loading 'resources' on Instance uuid d900df05-b65c-4a45-94d1-563afbf9c022 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 904.490861] env[69367]: DEBUG oslo_vmware.api [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Waiting for the task: (returnval){ [ 904.490861] env[69367]: value = "task-4234124" [ 904.490861] env[69367]: _type = "Task" [ 904.490861] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.505626] env[69367]: DEBUG oslo_vmware.api [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234124, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.509874] env[69367]: INFO nova.scheduler.client.report [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Deleted allocations for instance 46b6bc45-57f0-4850-9249-6bbb22b162c6 [ 904.621822] env[69367]: DEBUG nova.compute.manager [req-e11d9351-f881-457e-83a3-56d5543fc879 req-529d28dc-e816-4fab-b4e5-06a35a9a49b5 service nova] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Received event network-vif-plugged-1daae92e-1898-467c-be43-e8f27bff4242 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 904.622390] env[69367]: DEBUG oslo_concurrency.lockutils [req-e11d9351-f881-457e-83a3-56d5543fc879 req-529d28dc-e816-4fab-b4e5-06a35a9a49b5 service nova] Acquiring lock "42db60d9-e5f7-4925-8f6f-d3884687414a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.622736] env[69367]: DEBUG oslo_concurrency.lockutils [req-e11d9351-f881-457e-83a3-56d5543fc879 req-529d28dc-e816-4fab-b4e5-06a35a9a49b5 service nova] Lock "42db60d9-e5f7-4925-8f6f-d3884687414a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.622987] env[69367]: DEBUG oslo_concurrency.lockutils [req-e11d9351-f881-457e-83a3-56d5543fc879 req-529d28dc-e816-4fab-b4e5-06a35a9a49b5 service nova] Lock "42db60d9-e5f7-4925-8f6f-d3884687414a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.623271] env[69367]: DEBUG nova.compute.manager [req-e11d9351-f881-457e-83a3-56d5543fc879 req-529d28dc-e816-4fab-b4e5-06a35a9a49b5 service nova] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] No waiting events found dispatching network-vif-plugged-1daae92e-1898-467c-be43-e8f27bff4242 {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 904.623514] env[69367]: WARNING nova.compute.manager [req-e11d9351-f881-457e-83a3-56d5543fc879 req-529d28dc-e816-4fab-b4e5-06a35a9a49b5 service nova] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Received 
unexpected event network-vif-plugged-1daae92e-1898-467c-be43-e8f27bff4242 for instance with vm_state building and task_state spawning. [ 904.623744] env[69367]: DEBUG nova.compute.manager [req-e11d9351-f881-457e-83a3-56d5543fc879 req-529d28dc-e816-4fab-b4e5-06a35a9a49b5 service nova] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Received event network-changed-1daae92e-1898-467c-be43-e8f27bff4242 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 904.623962] env[69367]: DEBUG nova.compute.manager [req-e11d9351-f881-457e-83a3-56d5543fc879 req-529d28dc-e816-4fab-b4e5-06a35a9a49b5 service nova] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Refreshing instance network info cache due to event network-changed-1daae92e-1898-467c-be43-e8f27bff4242. {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 904.624222] env[69367]: DEBUG oslo_concurrency.lockutils [req-e11d9351-f881-457e-83a3-56d5543fc879 req-529d28dc-e816-4fab-b4e5-06a35a9a49b5 service nova] Acquiring lock "refresh_cache-42db60d9-e5f7-4925-8f6f-d3884687414a" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.939230] env[69367]: DEBUG oslo_vmware.api [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Task: {'id': task-4234123, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497015} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.939618] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3/f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 904.939833] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 904.940120] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-97e64a57-6e91-41f4-b237-59d4b5542237 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.949073] env[69367]: DEBUG oslo_vmware.api [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Waiting for the task: (returnval){ [ 904.949073] env[69367]: value = "task-4234125" [ 904.949073] env[69367]: _type = "Task" [ 904.949073] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.956782] env[69367]: DEBUG oslo_concurrency.lockutils [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Releasing lock "refresh_cache-42db60d9-e5f7-4925-8f6f-d3884687414a" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 904.957334] env[69367]: DEBUG nova.compute.manager [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Instance network_info: |[{"id": "1daae92e-1898-467c-be43-e8f27bff4242", "address": "fa:16:3e:b1:9b:f0", "network": {"id": "a8e5c1a6-2526-4042-8a8d-fdb54dbe7bf9", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1429177141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5bc3d470905412ea72a8eedb98e9e47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1daae92e-18", "ovs_interfaceid": "1daae92e-1898-467c-be43-e8f27bff4242", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 904.961393] env[69367]: DEBUG oslo_concurrency.lockutils [req-e11d9351-f881-457e-83a3-56d5543fc879 req-529d28dc-e816-4fab-b4e5-06a35a9a49b5 service nova] Acquired lock "refresh_cache-42db60d9-e5f7-4925-8f6f-d3884687414a" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 904.961593] env[69367]: DEBUG nova.network.neutron [req-e11d9351-f881-457e-83a3-56d5543fc879 req-529d28dc-e816-4fab-b4e5-06a35a9a49b5 service nova] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Refreshing network info cache for port 1daae92e-1898-467c-be43-e8f27bff4242 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 904.963186] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:9b:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e6f11c0d-c73a-47f5-b02e-47bff48da0e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1daae92e-1898-467c-be43-e8f27bff4242', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 904.971056] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 
tempest-AttachInterfacesTestJSON-2085020977-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 904.971320] env[69367]: DEBUG oslo_vmware.api [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Task: {'id': task-4234125, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.971851] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 904.973487] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9ad60cd4-8ef2-45eb-8729-f57b974d4a87 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.000601] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 905.000601] env[69367]: value = "task-4234126" [ 905.000601] env[69367]: _type = "Task" [ 905.000601] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.008895] env[69367]: DEBUG oslo_vmware.api [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234124, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.017834] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234126, 'name': CreateVM_Task} progress is 5%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.021289] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a6fb10fd-5288-49e0-b35b-8dedcaa9fd7f tempest-ListServersNegativeTestJSON-1734614987 tempest-ListServersNegativeTestJSON-1734614987-project-member] Lock "46b6bc45-57f0-4850-9249-6bbb22b162c6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.010s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.423573] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70820e47-2e99-4382-83e5-f2f62c3fb5e8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.433223] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-971b5695-9e2a-433f-aaae-af8211463e2b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.467151] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3c3c34d-7f9b-4891-aff7-54daeecb4f28 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.476904] env[69367]: DEBUG oslo_vmware.api [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Task: {'id': task-4234125, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.147684} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.479112] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 905.480242] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12de7e0b-0dc7-4422-b1dc-f43aa32a8d68 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.483668] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04508cfb-3aa7-43fb-83ba-57dfdaf3d604 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.500201] env[69367]: DEBUG nova.compute.provider_tree [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 905.521543] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 
tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3/f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 905.527924] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04520451-aaa1-4f9e-9102-8c6ee8253a9c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.550878] env[69367]: DEBUG oslo_vmware.api [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234124, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.812077} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.554947] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 652e2e23-7927-46ce-b8af-fffdb6ac8a3e/652e2e23-7927-46ce-b8af-fffdb6ac8a3e.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 905.555189] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 905.555514] env[69367]: DEBUG oslo_vmware.api [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Waiting for the task: (returnval){ [ 905.555514] env[69367]: value = "task-4234127" [ 905.555514] env[69367]: _type = "Task" [ 905.555514] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.555669] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234126, 'name': CreateVM_Task, 'duration_secs': 0.495085} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.555859] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-523944aa-c819-4149-8fda-a34b357a357a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.557745] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 905.560966] env[69367]: DEBUG oslo_concurrency.lockutils [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.561145] env[69367]: DEBUG oslo_concurrency.lockutils [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 905.561460] env[69367]: DEBUG oslo_concurrency.lockutils [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 905.562657] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a042ddb4-3f31-4bf9-946f-358c82936133 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.565018] env[69367]: ERROR nova.scheduler.client.report [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [req-86fef5a8-a297-4ce6-b89e-dfb08cc23934] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-86fef5a8-a297-4ce6-b89e-dfb08cc23934"}]} [ 905.565352] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.077s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.565954] env[69367]: ERROR nova.compute.manager [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Traceback (most recent call last): [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] yield [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] self.set_inventory_for_provider( [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-86fef5a8-a297-4ce6-b89e-dfb08cc23934"}]} [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: 
d900df05-b65c-4a45-94d1-563afbf9c022] During handling of the above exception, another exception occurred: [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Traceback (most recent call last): [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] self._delete_instance(context, instance, bdms) [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] self._complete_deletion(context, instance) [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] self._update_resource_tracker(context, instance) [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] self.rt.update_usage(context, instance, instance.node) [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] return f(*args, **kwargs) [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] self._update(context.elevated(), self.compute_nodes[nodename]) [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] self._update_to_placement(context, compute_node, startup) [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] return attempt.get(self._wrap_exception) [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] six.reraise(self.value[0], self.value[1], self.value[2]) [ 905.565954] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 905.567157] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] raise value [ 905.567157] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 905.567157] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 905.567157] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 905.567157] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] self.reportclient.update_from_provider_tree( [ 905.567157] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 905.567157] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] with catch_all(pd.uuid): [ 905.567157] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 905.567157] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] self.gen.throw(typ, value, traceback) [ 905.567157] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 905.567157] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] raise exception.ResourceProviderSyncFailed() [ 905.567157] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 905.567157] env[69367]: ERROR nova.compute.manager [instance: d900df05-b65c-4a45-94d1-563afbf9c022] [ 905.568597] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.577s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.570082] env[69367]: INFO nova.compute.claims [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 905.577866] env[69367]: DEBUG oslo_vmware.api [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Waiting for the task: (returnval){ [ 905.577866] env[69367]: value = "task-4234128" [ 905.577866] env[69367]: _type = "Task" [ 905.577866] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.578148] env[69367]: DEBUG oslo_vmware.api [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Task: {'id': task-4234127, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.583755] env[69367]: DEBUG oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Waiting for the task: (returnval){ [ 905.583755] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]5237fa83-20df-3874-5d2a-169d4671e8a5" [ 905.583755] env[69367]: _type = "Task" [ 905.583755] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.592358] env[69367]: DEBUG oslo_vmware.api [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234128, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.598631] env[69367]: DEBUG oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5237fa83-20df-3874-5d2a-169d4671e8a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.832066] env[69367]: DEBUG nova.network.neutron [req-e11d9351-f881-457e-83a3-56d5543fc879 req-529d28dc-e816-4fab-b4e5-06a35a9a49b5 service nova] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Updated VIF entry in instance network info cache for port 1daae92e-1898-467c-be43-e8f27bff4242. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 905.832544] env[69367]: DEBUG nova.network.neutron [req-e11d9351-f881-457e-83a3-56d5543fc879 req-529d28dc-e816-4fab-b4e5-06a35a9a49b5 service nova] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Updating instance_info_cache with network_info: [{"id": "1daae92e-1898-467c-be43-e8f27bff4242", "address": "fa:16:3e:b1:9b:f0", "network": {"id": "a8e5c1a6-2526-4042-8a8d-fdb54dbe7bf9", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1429177141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5bc3d470905412ea72a8eedb98e9e47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1daae92e-18", "ovs_interfaceid": "1daae92e-1898-467c-be43-e8f27bff4242", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.071975] env[69367]: DEBUG oslo_vmware.api [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Task: {'id': task-4234127, 'name': ReconfigVM_Task, 'duration_secs': 0.274461} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.072377] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Reconfigured VM instance instance-0000005d to attach disk [datastore2] f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3/f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 906.073947] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Lock "d900df05-b65c-4a45-94d1-563afbf9c022" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.881s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 906.075516] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a0f2925f-c9f1-4a66-b2fd-5458e4731dc7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.099193] env[69367]: DEBUG oslo_vmware.api [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Waiting for the task: (returnval){ [ 906.099193] env[69367]: value = "task-4234129" [ 906.099193] env[69367]: _type = "Task" [ 906.099193] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.103051] env[69367]: DEBUG oslo_vmware.api [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234128, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.193215} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.107259] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 906.111729] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a957158-7938-42a7-8507-6b2ac22a9a37 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.114781] env[69367]: DEBUG oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5237fa83-20df-3874-5d2a-169d4671e8a5, 'name': SearchDatastore_Task, 'duration_secs': 0.020033} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.115560] env[69367]: DEBUG oslo_concurrency.lockutils [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 906.116293] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 906.116293] env[69367]: DEBUG oslo_concurrency.lockutils [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.116293] env[69367]: DEBUG oslo_concurrency.lockutils [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 906.116781] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 906.117181] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d868c3ae-1c28-45c9-b2d1-1651a5426acc {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.151138] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] 652e2e23-7927-46ce-b8af-fffdb6ac8a3e/652e2e23-7927-46ce-b8af-fffdb6ac8a3e.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 906.151580] env[69367]: DEBUG oslo_vmware.api [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Task: {'id': task-4234129, 'name': Rename_Task} progress is 14%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.152450] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b26f72aa-9acd-4bcf-b252-78c70de6c8f7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.174249] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 906.174538] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 906.176660] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f7f00c2-a0c6-4d38-8fc4-8576d1e0d5d4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.184184] env[69367]: DEBUG oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Waiting for the task: (returnval){ [ 906.184184] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]525d330c-00f9-2ab5-4461-cb44cb45089e" [ 906.184184] env[69367]: _type = "Task" [ 906.184184] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.184523] env[69367]: DEBUG oslo_vmware.api [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Waiting for the task: (returnval){ [ 906.184523] env[69367]: value = "task-4234130" [ 906.184523] env[69367]: _type = "Task" [ 906.184523] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.200024] env[69367]: DEBUG oslo_vmware.api [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234130, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.201883] env[69367]: DEBUG oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]525d330c-00f9-2ab5-4461-cb44cb45089e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.335492] env[69367]: DEBUG oslo_concurrency.lockutils [req-e11d9351-f881-457e-83a3-56d5543fc879 req-529d28dc-e816-4fab-b4e5-06a35a9a49b5 service nova] Releasing lock "refresh_cache-42db60d9-e5f7-4925-8f6f-d3884687414a" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 906.613762] env[69367]: DEBUG oslo_vmware.api [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Task: {'id': task-4234129, 'name': Rename_Task, 'duration_secs': 0.165482} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.614866] env[69367]: DEBUG nova.scheduler.client.report [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 906.616862] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 906.617366] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eddd1eb9-c008-4c6a-9e8f-4277f33a303a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.624970] env[69367]: DEBUG oslo_vmware.api [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Waiting for the task: (returnval){ [ 906.624970] env[69367]: value = "task-4234131" [ 906.624970] env[69367]: _type = "Task" [ 906.624970] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.631280] env[69367]: DEBUG nova.scheduler.client.report [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 906.631521] env[69367]: DEBUG nova.compute.provider_tree [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 906.636805] env[69367]: DEBUG oslo_vmware.api [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Task: {'id': task-4234131, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.646658] env[69367]: DEBUG nova.scheduler.client.report [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 906.677425] env[69367]: DEBUG nova.scheduler.client.report [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 906.705183] env[69367]: DEBUG oslo_vmware.api [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234130, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.705560] env[69367]: DEBUG oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]525d330c-00f9-2ab5-4461-cb44cb45089e, 'name': SearchDatastore_Task, 'duration_secs': 0.01533} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.709819] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e1c51ab-e5ae-4764-8603-08545fb71049 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.716795] env[69367]: DEBUG oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Waiting for the task: (returnval){ [ 906.716795] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52cff5cf-4d15-38ee-f785-2ee680881dcc" [ 906.716795] env[69367]: _type = "Task" [ 906.716795] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.729313] env[69367]: DEBUG oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52cff5cf-4d15-38ee-f785-2ee680881dcc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.125422] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c4485f-c478-43a9-884e-f06983487ffd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.142407] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ce27ee-de40-431a-81c5-d4ff140bf1b5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.146098] env[69367]: DEBUG oslo_vmware.api [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Task: {'id': task-4234131, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.184690] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f473c3a-1930-45a5-81ce-3e9620552200 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.203072] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abac5c79-02ee-4add-b62e-44349a6a6c21 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.207933] env[69367]: DEBUG oslo_vmware.api [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234130, 'name': ReconfigVM_Task, 'duration_secs': 0.572746} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.210522] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Reconfigured VM instance instance-0000005c to attach disk [datastore2] 652e2e23-7927-46ce-b8af-fffdb6ac8a3e/652e2e23-7927-46ce-b8af-fffdb6ac8a3e.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 907.210522] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7396f8d9-9fce-4fde-8702-843c8bb7933d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.221443] env[69367]: DEBUG nova.compute.provider_tree [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 907.227245] env[69367]: DEBUG oslo_vmware.api [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Waiting for the task: (returnval){ [ 907.227245] env[69367]: value = "task-4234132" [ 907.227245] env[69367]: _type = "Task" [ 907.227245] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.234826] env[69367]: DEBUG oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52cff5cf-4d15-38ee-f785-2ee680881dcc, 'name': SearchDatastore_Task, 'duration_secs': 0.012729} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.236225] env[69367]: DEBUG oslo_concurrency.lockutils [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 907.236757] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 42db60d9-e5f7-4925-8f6f-d3884687414a/42db60d9-e5f7-4925-8f6f-d3884687414a.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 907.237124] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-92bb6cd9-008c-484c-939b-c20151849c09 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.243016] env[69367]: DEBUG oslo_vmware.api [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234132, 'name': Rename_Task} progress is 14%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.251393] env[69367]: DEBUG oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Waiting for the task: (returnval){ [ 907.251393] env[69367]: value = "task-4234133" [ 907.251393] env[69367]: _type = "Task" [ 907.251393] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.261019] env[69367]: DEBUG oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234133, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.592414] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.638738] env[69367]: DEBUG oslo_vmware.api [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Task: {'id': task-4234131, 'name': PowerOnVM_Task, 'duration_secs': 0.52222} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.638999] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 907.639220] env[69367]: INFO nova.compute.manager [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Took 7.67 seconds to spawn the instance on the hypervisor. [ 907.639419] env[69367]: DEBUG nova.compute.manager [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 907.640267] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-086fc801-9189-449b-b3fd-8e093387ef95 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.743409] env[69367]: DEBUG oslo_vmware.api [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234132, 'name': Rename_Task, 'duration_secs': 0.203179} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.743816] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 907.744105] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-017d7ffb-8a63-4cb1-aaae-43f367062bdd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.752865] env[69367]: DEBUG oslo_vmware.api [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Waiting for the task: (returnval){ [ 907.752865] env[69367]: value = "task-4234134" [ 907.752865] env[69367]: _type = "Task" [ 907.752865] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.760687] env[69367]: ERROR nova.scheduler.client.report [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [req-db455dac-12b6-40ca-95f5-ef4d9a2015b8] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-db455dac-12b6-40ca-95f5-ef4d9a2015b8"}]} [ 907.761526] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.193s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 907.761809] env[69367]: ERROR nova.compute.manager [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] Traceback (most recent call last): [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] yield [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] self.set_inventory_for_provider( [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-db455dac-12b6-40ca-95f5-ef4d9a2015b8"}]} [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: 
b5786197-8ba8-44e1-ac01-2c9837ca5ec6] During handling of the above exception, another exception occurred: [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] Traceback (most recent call last): [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] with self.rt.instance_claim(context, instance, node, allocs, [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] return f(*args, **kwargs) [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] self._update(elevated, cn) [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] self._update_to_placement(context, compute_node, startup) [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] return attempt.get(self._wrap_exception) [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] six.reraise(self.value[0], self.value[1], self.value[2]) [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] raise value [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in 
_update_to_placement [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] self.reportclient.update_from_provider_tree( [ 907.761809] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 907.762840] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] with catch_all(pd.uuid): [ 907.762840] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 907.762840] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] self.gen.throw(typ, value, traceback) [ 907.762840] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 907.762840] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] raise exception.ResourceProviderSyncFailed() [ 907.762840] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 907.762840] env[69367]: ERROR nova.compute.manager [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] [ 907.762840] env[69367]: DEBUG nova.compute.utils [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 907.764488] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.094s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.764701] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 907.764869] env[69367]: INFO nova.compute.manager [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] [instance: 05aae150-5d86-4210-ae7e-8c63e83cb907] Successfully reverted task state from None on failure for instance. 
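The retrying.py and six.py frames in the tracebacks above come from the retry decorator wrapping ResourceTracker._update_to_placement; once no further attempt is requested, the stored exception is re-raised and then converted to ResourceProviderSyncFailed by the catch_all context manager in the report client. A simplified sketch of that re-raise behaviour with the retrying library (the predicate below is hypothetical, not Nova's actual retry condition):

from retrying import retry


class ResourceProviderUpdateFailed(Exception):
    """Stand-in for nova.exception.ResourceProviderUpdateFailed."""


def _retriable(exc):
    # Hypothetical predicate: a 400 with the same payload will fail again,
    # so it is not worth another attempt.
    return False


@retry(retry_on_exception=_retriable, stop_max_attempt_number=4, wait_fixed=1000)
def update_to_placement():
    # Stand-in for the PUT /resource_providers/.../inventories rejected above.
    raise ResourceProviderUpdateFailed("DISK_GB max_unit 0 violates minimum 1")


try:
    update_to_placement()
except ResourceProviderUpdateFailed as exc:
    # The original exception surfaces through retrying's Attempt.get() and
    # six.reraise(), matching the frames seen in the log tracebacks.
    print(f"giving up: {exc}")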
[ 907.767412] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.026s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.769626] env[69367]: INFO nova.compute.claims [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server [None req-4dc2f4c5-b530-43f5-82af-dd384731d332 tempest-AttachInterfacesV270Test-1532147299 tempest-AttachInterfacesV270Test-1532147299-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server yield [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-4e7bf7bc-e2f4-4391-9fd0-d971e94f8f3f"}]} [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server return 
self._do_dispatch(endpoint, method, ctxt, args) [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in 
terminate_instance [ 907.773761] env[69367]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server 
six.reraise(self.value[0], self.value[1], self.value[2]) [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server raise value [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 907.775572] env[69367]: ERROR oslo_messaging.rpc.server [ 907.778122] env[69367]: DEBUG nova.compute.manager [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] Build of instance b5786197-8ba8-44e1-ac01-2c9837ca5ec6 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 907.778652] env[69367]: DEBUG nova.compute.manager [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 907.778908] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Acquiring lock "refresh_cache-b5786197-8ba8-44e1-ac01-2c9837ca5ec6" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.779097] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Acquired lock "refresh_cache-b5786197-8ba8-44e1-ac01-2c9837ca5ec6" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 907.779272] env[69367]: DEBUG nova.network.neutron [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 907.781237] env[69367]: DEBUG oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234133, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.787791] env[69367]: DEBUG oslo_vmware.api [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234134, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.164130] env[69367]: INFO nova.compute.manager [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Took 31.12 seconds to build instance. [ 908.267484] env[69367]: DEBUG oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234133, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.819231} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.271631] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 42db60d9-e5f7-4925-8f6f-d3884687414a/42db60d9-e5f7-4925-8f6f-d3884687414a.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 908.271888] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 908.272341] env[69367]: DEBUG oslo_vmware.api [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234134, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.272580] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fc336672-fe84-48b4-9142-e58f8dc8244a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.281734] env[69367]: DEBUG oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Waiting for the task: (returnval){ [ 908.281734] env[69367]: value = "task-4234135" [ 908.281734] env[69367]: _type = "Task" [ 908.281734] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.298911] env[69367]: DEBUG oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234135, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.449388] env[69367]: DEBUG nova.network.neutron [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 908.551300] env[69367]: DEBUG nova.network.neutron [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.668345] env[69367]: DEBUG oslo_concurrency.lockutils [None req-25b42554-9a7c-4b23-a267-bc1739181f2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.636s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.769910] env[69367]: DEBUG oslo_vmware.api [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234134, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.795067] env[69367]: DEBUG oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234135, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.189825} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.796722] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 908.797823] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e4fe41d-1c8c-42b6-bb54-67529311d4a3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.823169] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] 42db60d9-e5f7-4925-8f6f-d3884687414a/42db60d9-e5f7-4925-8f6f-d3884687414a.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 908.824749] env[69367]: DEBUG nova.scheduler.client.report [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 908.827592] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e81075e-3e0c-4b46-92db-8a2ba19b72f2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.849589] env[69367]: DEBUG 
oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Waiting for the task: (returnval){ [ 908.849589] env[69367]: value = "task-4234136" [ 908.849589] env[69367]: _type = "Task" [ 908.849589] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.859453] env[69367]: DEBUG oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234136, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.860676] env[69367]: DEBUG nova.scheduler.client.report [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 908.860837] env[69367]: DEBUG nova.compute.provider_tree [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 908.874981] env[69367]: DEBUG nova.scheduler.client.report [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 908.897053] env[69367]: DEBUG nova.scheduler.client.report [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 909.054415] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Releasing lock "refresh_cache-b5786197-8ba8-44e1-ac01-2c9837ca5ec6" {{(pid=69367) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 909.054415] env[69367]: DEBUG nova.compute.manager [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 909.054767] env[69367]: DEBUG nova.compute.manager [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 909.054767] env[69367]: DEBUG nova.network.neutron [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 909.091482] env[69367]: DEBUG nova.network.neutron [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 909.271576] env[69367]: DEBUG oslo_vmware.api [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234134, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.335632] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6f10bfc-d23b-431c-94a7-d5789490e0f7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.344720] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9ce13f7-cd6b-4c7f-9e0a-9b0f54ada5af {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.383102] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b08f29dc-d87e-4430-84fc-6d6c6f08405d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.390412] env[69367]: DEBUG oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234136, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.396135] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c13fe37-80af-4aa7-8f1b-299124835e6d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.411998] env[69367]: DEBUG nova.compute.provider_tree [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 909.594270] env[69367]: DEBUG nova.network.neutron [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.746326] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 909.746807] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.747141] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 909.747440] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.747668] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.750107] env[69367]: INFO nova.compute.manager [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Terminating instance [ 909.772307] env[69367]: DEBUG oslo_vmware.api [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234134, 'name': PowerOnVM_Task, 'duration_secs': 1.564583} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.772555] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 909.772789] env[69367]: INFO nova.compute.manager [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Took 12.39 seconds to spawn the instance on the hypervisor. [ 909.773015] env[69367]: DEBUG nova.compute.manager [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 909.773914] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ef5b669-61d7-43d4-b64e-f1d8a658d729 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.860569] env[69367]: DEBUG oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234136, 'name': ReconfigVM_Task, 'duration_secs': 0.691228} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.860847] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Reconfigured VM instance instance-0000005e to attach disk [datastore2] 42db60d9-e5f7-4925-8f6f-d3884687414a/42db60d9-e5f7-4925-8f6f-d3884687414a.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 909.861573] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c027c8f5-9623-4724-8a77-7e23d9e57acb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.868954] env[69367]: DEBUG oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Waiting for the task: (returnval){ [ 909.868954] env[69367]: value = "task-4234137" [ 909.868954] env[69367]: _type = "Task" [ 909.868954] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.880360] env[69367]: DEBUG oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234137, 'name': Rename_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.935891] env[69367]: ERROR nova.scheduler.client.report [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [req-6428efec-827f-4982-92ee-3ba80c7c9033] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-6428efec-827f-4982-92ee-3ba80c7c9033"}]} [ 909.936289] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.169s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.936888] env[69367]: ERROR nova.compute.manager [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] Traceback (most recent call last): [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] yield [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] self.set_inventory_for_provider( [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-6428efec-827f-4982-92ee-3ba80c7c9033"}]} [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] 
During handling of the above exception, another exception occurred: [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] Traceback (most recent call last): [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] with self.rt.instance_claim(context, instance, node, allocs, [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] return f(*args, **kwargs) [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] self._update(elevated, cn) [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] self._update_to_placement(context, compute_node, startup) [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] return attempt.get(self._wrap_exception) [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] six.reraise(self.value[0], self.value[1], self.value[2]) [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] raise value [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 909.936888] 
env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] self.reportclient.update_from_provider_tree( [ 909.936888] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 909.938462] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] with catch_all(pd.uuid): [ 909.938462] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 909.938462] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] self.gen.throw(typ, value, traceback) [ 909.938462] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 909.938462] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] raise exception.ResourceProviderSyncFailed() [ 909.938462] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 909.938462] env[69367]: ERROR nova.compute.manager [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] [ 909.938462] env[69367]: DEBUG nova.compute.utils [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 909.938829] env[69367]: DEBUG oslo_concurrency.lockutils [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.433s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.939448] env[69367]: DEBUG nova.objects.instance [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lazy-loading 'resources' on Instance uuid 95efcff3-a81b-49fb-b85a-dae060c023b2 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 909.940263] env[69367]: DEBUG nova.compute.manager [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] Build of instance 1b57dbcb-527e-4142-8dbf-5622978a7c02 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 909.940672] env[69367]: DEBUG nova.compute.manager [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 909.940896] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquiring lock "refresh_cache-1b57dbcb-527e-4142-8dbf-5622978a7c02" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.941067] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquired lock "refresh_cache-1b57dbcb-527e-4142-8dbf-5622978a7c02" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 909.941227] env[69367]: DEBUG nova.network.neutron [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 910.097025] env[69367]: INFO nova.compute.manager [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] [instance: b5786197-8ba8-44e1-ac01-2c9837ca5ec6] Took 1.04 seconds to deallocate network for instance. [ 910.255899] env[69367]: DEBUG nova.compute.manager [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Start destroying the instance on the hypervisor. 
{{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 910.256110] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 910.257156] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f6d9b22-741d-413f-bd6a-cf2055cdf460 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.266328] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 910.266599] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ace47c93-d8e3-46ba-9b3c-046e1c50ab9f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.276070] env[69367]: DEBUG oslo_vmware.api [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Waiting for the task: (returnval){ [ 910.276070] env[69367]: value = "task-4234138" [ 910.276070] env[69367]: _type = "Task" [ 910.276070] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.290705] env[69367]: DEBUG oslo_vmware.api [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Task: {'id': task-4234138, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.295388] env[69367]: INFO nova.compute.manager [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Took 35.10 seconds to build instance. [ 910.380176] env[69367]: DEBUG oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234137, 'name': Rename_Task, 'duration_secs': 0.169224} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.380454] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 910.380714] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-87db152c-5ffd-49b8-a4e1-a0f0c0de997a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.388114] env[69367]: DEBUG oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Waiting for the task: (returnval){ [ 910.388114] env[69367]: value = "task-4234139" [ 910.388114] env[69367]: _type = "Task" [ 910.388114] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.397978] env[69367]: DEBUG oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234139, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.463375] env[69367]: DEBUG nova.scheduler.client.report [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 910.469205] env[69367]: DEBUG nova.network.neutron [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 910.481085] env[69367]: DEBUG nova.scheduler.client.report [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 910.481412] env[69367]: DEBUG nova.compute.provider_tree [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 910.495321] env[69367]: DEBUG nova.scheduler.client.report [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 910.515182] env[69367]: DEBUG nova.scheduler.client.report [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 910.576499] env[69367]: DEBUG nova.network.neutron [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.788916] env[69367]: DEBUG oslo_vmware.api [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Task: {'id': task-4234138, 'name': PowerOffVM_Task, 'duration_secs': 0.261416} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.789641] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 910.789641] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 910.789773] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b0eafad9-c3ac-443e-8462-9308594e2f8f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.801853] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a8de2b5c-68bd-4ef3-8e82-8594b852f101 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "652e2e23-7927-46ce-b8af-fffdb6ac8a3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.613s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 910.841689] env[69367]: DEBUG oslo_concurrency.lockutils [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquiring lock "652e2e23-7927-46ce-b8af-fffdb6ac8a3e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 910.842512] env[69367]: DEBUG oslo_concurrency.lockutils [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "652e2e23-7927-46ce-b8af-fffdb6ac8a3e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 910.842512] env[69367]: DEBUG oslo_concurrency.lockutils [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquiring lock "652e2e23-7927-46ce-b8af-fffdb6ac8a3e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 910.842512] env[69367]: DEBUG oslo_concurrency.lockutils [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "652e2e23-7927-46ce-b8af-fffdb6ac8a3e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 910.842693] env[69367]: DEBUG oslo_concurrency.lockutils [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock 
"652e2e23-7927-46ce-b8af-fffdb6ac8a3e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 910.844930] env[69367]: INFO nova.compute.manager [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Terminating instance [ 910.859481] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 910.859708] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 910.859893] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Deleting the datastore file [datastore2] f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 910.863123] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f8e9fcde-160a-45b8-8269-cd46ba03ce54 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.870894] env[69367]: DEBUG oslo_vmware.api [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Waiting for the task: (returnval){ [ 910.870894] env[69367]: value = "task-4234141" [ 910.870894] env[69367]: _type = "Task" [ 910.870894] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.879729] env[69367]: DEBUG oslo_vmware.api [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Task: {'id': task-4234141, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.893922] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da54966f-d5df-4276-8c9b-ae1e8be0bd4f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.902887] env[69367]: DEBUG oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234139, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.905555] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51617981-cb16-4f30-80f0-eb21b24976c4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.938616] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa282ac6-fc44-49e8-9af2-fd7bb5ec9728 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.947428] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-281e6f1b-6417-4072-afa8-4f8ac49224f3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.961508] env[69367]: DEBUG nova.compute.provider_tree [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 911.081505] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Releasing lock "refresh_cache-1b57dbcb-527e-4142-8dbf-5622978a7c02" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 911.081701] env[69367]: DEBUG nova.compute.manager [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 911.081889] env[69367]: DEBUG nova.compute.manager [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 911.082084] env[69367]: DEBUG nova.network.neutron [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 911.099538] env[69367]: DEBUG nova.network.neutron [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 911.131867] env[69367]: INFO nova.scheduler.client.report [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Deleted allocations for instance b5786197-8ba8-44e1-ac01-2c9837ca5ec6 [ 911.351214] env[69367]: DEBUG nova.compute.manager [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Start destroying the instance on the hypervisor. {{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 911.351482] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 911.352490] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09411eab-bc57-4a45-8ff3-2a5d9ecb410e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.361091] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 911.362031] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b62f9b1f-641d-42b7-8ac3-02d195d62dc6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.369185] env[69367]: DEBUG oslo_vmware.api [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Waiting for the task: (returnval){ [ 911.369185] env[69367]: value = "task-4234142" [ 911.369185] env[69367]: _type = "Task" [ 911.369185] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.381615] env[69367]: DEBUG oslo_vmware.api [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234142, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.385376] env[69367]: DEBUG oslo_vmware.api [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Task: {'id': task-4234141, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.371064} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.385725] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 911.385969] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 911.386208] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 911.386463] env[69367]: INFO nova.compute.manager [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Took 1.13 seconds to destroy the instance on the hypervisor. [ 911.386781] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 911.387120] env[69367]: DEBUG nova.compute.manager [-] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 911.387120] env[69367]: DEBUG nova.network.neutron [-] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 911.398661] env[69367]: DEBUG oslo_vmware.api [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Task: {'id': task-4234139, 'name': PowerOnVM_Task, 'duration_secs': 0.655633} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.398974] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 911.399184] env[69367]: INFO nova.compute.manager [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Took 8.86 seconds to spawn the instance on the hypervisor. 
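The PowerOffVM_Task and DeleteDatastoreFile_Task entries above are all driven by oslo.vmware's task polling (wait_for_task at api.py:397, _poll_task at api.py:434): the driver invokes the vSphere task method, then blocks until the task reports success or error. A minimal standalone sketch of that pattern, assuming placeholder vCenter credentials and reusing the instance UUID from the log; return handling for FindAllByUuid is simplified:

# Sketch of the oslo.vmware invoke/wait pattern seen in the log above.
# Host, username and password are placeholders.
from oslo_vmware import api

session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

# SearchIndex.FindAllByUuid, as invoked in the log, to locate the VM.
vm_refs = session.invoke_api(
    session.vim, 'FindAllByUuid',
    session.vim.service_content.searchIndex,
    uuid='42db60d9-e5f7-4925-8f6f-d3884687414a',
    vmSearch=True, instanceUuid=True) or []

for vm_ref in vm_refs:
    # Kick off the power-off task, then poll it to completion;
    # wait_for_task raises if the task ends in an error state.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)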
[ 911.400280] env[69367]: DEBUG nova.compute.manager [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 911.400280] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88b2dd3b-ad0e-424e-8ed3-bb71caae29b4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.483786] env[69367]: ERROR nova.scheduler.client.report [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [req-abe0ede3-4198-4d99-b4ea-8007eb0260c7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-abe0ede3-4198-4d99-b4ea-8007eb0260c7"}]} [ 911.484167] env[69367]: DEBUG oslo_concurrency.lockutils [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.545s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.484787] env[69367]: ERROR nova.compute.manager [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Traceback (most recent call last): [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] yield [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] self.set_inventory_for_provider( [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-abe0ede3-4198-4d99-b4ea-8007eb0260c7"}]} [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] During handling of the above exception, another exception occurred: [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Traceback (most recent call last): [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] self._delete_instance(context, instance, bdms) [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] self._complete_deletion(context, instance) [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] self._update_resource_tracker(context, instance) [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 
95efcff3-a81b-49fb-b85a-dae060c023b2] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] self.rt.update_usage(context, instance, instance.node) [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] return f(*args, **kwargs) [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] self._update(context.elevated(), self.compute_nodes[nodename]) [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] self._update_to_placement(context, compute_node, startup) [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] return attempt.get(self._wrap_exception) [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] six.reraise(self.value[0], self.value[1], self.value[2]) [ 911.484787] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 911.485686] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] raise value [ 911.485686] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 911.485686] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 911.485686] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 911.485686] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] self.reportclient.update_from_provider_tree( [ 911.485686] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 911.485686] 
env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] with catch_all(pd.uuid): [ 911.485686] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 911.485686] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] self.gen.throw(typ, value, traceback) [ 911.485686] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 911.485686] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] raise exception.ResourceProviderSyncFailed() [ 911.485686] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 911.485686] env[69367]: ERROR nova.compute.manager [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] [ 911.487174] env[69367]: DEBUG oslo_concurrency.lockutils [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.573s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.488943] env[69367]: INFO nova.compute.claims [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 911.602396] env[69367]: DEBUG nova.network.neutron [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.641437] env[69367]: DEBUG oslo_concurrency.lockutils [None req-4be840a9-c4f6-4802-be1a-8c2336cb67fa tempest-DeleteServersTestJSON-900161964 tempest-DeleteServersTestJSON-900161964-project-member] Lock "b5786197-8ba8-44e1-ac01-2c9837ca5ec6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.677s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.777206] env[69367]: DEBUG nova.compute.manager [req-8516b59f-7fba-436a-b42f-da491c2c62bf req-076f4386-77ad-4a3f-9453-267541840784 service nova] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Received event network-vif-deleted-05366f50-f66d-46a5-8e2c-f3c687488099 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 911.777258] env[69367]: INFO nova.compute.manager [req-8516b59f-7fba-436a-b42f-da491c2c62bf req-076f4386-77ad-4a3f-9453-267541840784 service nova] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Neutron deleted interface 05366f50-f66d-46a5-8e2c-f3c687488099; detaching it from the instance and deleting it from the info cache [ 911.777473] env[69367]: DEBUG nova.network.neutron [req-8516b59f-7fba-436a-b42f-da491c2c62bf req-076f4386-77ad-4a3f-9453-267541840784 service nova] 
[instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.881761] env[69367]: DEBUG oslo_vmware.api [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234142, 'name': PowerOffVM_Task, 'duration_secs': 0.233834} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.882271] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 911.882947] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 911.882947] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef215fd1-8447-4945-b1aa-13a9dafd451b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.922015] env[69367]: INFO nova.compute.manager [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Took 28.57 seconds to build instance. 
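The 400 above is placement rejecting the inventory PUT: the quoted schema fragment requires max_unit to be an integer between 1 and 2147483647, while this compute node reports a DISK_GB max_unit of 0, so the resource tracker's update fails and ResourceProviderSyncFailed puts instance 95efcff3 into ERROR. The validation step can be reproduced in isolation with jsonschema, using only the fragment cited in the error message (the real placement schema carries more required keys, omitted here):

# Reproduce the validation failure quoted in the 400 response above,
# using just the max_unit constraint the error message cites.
import jsonschema

SCHEMA = {
    'type': 'object',
    'properties': {
        'inventories': {
            'type': 'object',
            'patternProperties': {
                '^[A-Z0-9_]+$': {
                    'type': 'object',
                    'properties': {
                        'max_unit': {'type': 'integer',
                                     'maximum': 2147483647,
                                     'minimum': 1},
                    },
                },
            },
        },
    },
}

payload = {'inventories': {'DISK_GB': {'total': 400, 'max_unit': 0}}}

try:
    jsonschema.validate(payload, SCHEMA)
except jsonschema.ValidationError as exc:
    print(exc.message)   # "0 is less than the minimum of 1", as in the log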
[ 911.993252] env[69367]: DEBUG oslo_concurrency.lockutils [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "95efcff3-a81b-49fb-b85a-dae060c023b2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.954s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.000133] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 912.003023] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 912.003023] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Deleting the datastore file [datastore2] 652e2e23-7927-46ce-b8af-fffdb6ac8a3e {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 912.003023] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c8b191b1-ec9b-4cdc-b88d-3980ad547adf {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.010535] env[69367]: DEBUG oslo_vmware.api [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Waiting for the task: (returnval){ [ 912.010535] env[69367]: value = "task-4234144" [ 912.010535] env[69367]: _type = "Task" [ 912.010535] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.022634] env[69367]: DEBUG oslo_vmware.api [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234144, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.105822] env[69367]: INFO nova.compute.manager [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 1b57dbcb-527e-4142-8dbf-5622978a7c02] Took 1.02 seconds to deallocate network for instance. 
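The Acquiring/acquired/released messages that recur throughout this log (lockutils.py:405/410/424) are emitted by oslo.concurrency's synchronized decorator; the "compute_resources" lock serializes the resource tracker, which is why callers above report waits such as 14.573s while placement updates are attempted. A minimal sketch of the decorator, with a placeholder lock name rather than Nova's:

# Sketch of the oslo.concurrency locking pattern behind the
# "Acquiring lock" / "acquired" / "released" entries in this log.
import time

from oslo_concurrency import lockutils

@lockutils.synchronized('example_resources')   # placeholder lock name
def update_usage(instance_uuid):
    # Runs with the named lock held; concurrent callers block here,
    # which is what the "waited N.NNNs" figures above measure.
    time.sleep(0.1)
    return instance_uuid

update_usage('e2639eea-9e67-45b5-acf0-5b015b4c0a1e')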
[ 912.206222] env[69367]: DEBUG nova.network.neutron [-] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.280281] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-56fd88cb-c2af-4574-8e93-905589ac82ac {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.290096] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab92916-35de-4992-8af5-dfe83a9c9c55 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.317230] env[69367]: DEBUG nova.compute.manager [req-8516b59f-7fba-436a-b42f-da491c2c62bf req-076f4386-77ad-4a3f-9453-267541840784 service nova] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Detach interface failed, port_id=05366f50-f66d-46a5-8e2c-f3c687488099, reason: Instance f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3 could not be found. {{(pid=69367) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11601}} [ 912.424189] env[69367]: DEBUG oslo_concurrency.lockutils [None req-26ec5c0c-f9e7-4ffd-a911-15654e71f770 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "42db60d9-e5f7-4925-8f6f-d3884687414a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.078s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.523890] env[69367]: DEBUG oslo_vmware.api [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Task: {'id': task-4234144, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.216276} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.524203] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 912.524414] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 912.524631] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 912.524812] env[69367]: INFO nova.compute.manager [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Took 1.17 seconds to destroy the instance on the hypervisor. 
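The entries that follow show the report client refreshing placement's current inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 (DISK_GB max_unit of 1) and then overwriting the local ProviderTree with the compute-reported value of 0 again, so the next PUT fails with the same 400. As an illustration only (a hypothetical pre-flight helper, not Nova code), a dict like the one logged above can be screened for values the placement schema will reject before it is submitted:

# Hypothetical pre-flight check, for illustration only: flag inventory
# entries whose max_unit is below placement's minimum of 1.
def find_invalid_max_units(inventories):
    return {rc: inv['max_unit']
            for rc, inv in inventories.items()
            if inv.get('max_unit', 1) < 1}

reported = {
    'VCPU': {'total': 48, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'max_unit': 65530},
    'DISK_GB': {'total': 400, 'max_unit': 0},   # value reported in this log
}

print(find_invalid_max_units(reported))   # {'DISK_GB': 0}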
[ 912.525068] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 912.525273] env[69367]: DEBUG nova.compute.manager [-] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 912.525371] env[69367]: DEBUG nova.network.neutron [-] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 912.527905] env[69367]: DEBUG nova.scheduler.client.report [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 912.544195] env[69367]: DEBUG nova.scheduler.client.report [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 912.544428] env[69367]: DEBUG nova.compute.provider_tree [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 912.556437] env[69367]: DEBUG nova.scheduler.client.report [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 912.580122] env[69367]: DEBUG nova.scheduler.client.report [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: 
HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 912.709691] env[69367]: INFO nova.compute.manager [-] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Took 1.32 seconds to deallocate network for instance. [ 912.788848] env[69367]: DEBUG nova.compute.manager [req-c596c4e2-c671-4b34-8866-63fccfb79919 req-28669e9e-c89b-49c1-89f1-f5f1a2c4646e service nova] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Received event network-changed-1daae92e-1898-467c-be43-e8f27bff4242 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 912.789071] env[69367]: DEBUG nova.compute.manager [req-c596c4e2-c671-4b34-8866-63fccfb79919 req-28669e9e-c89b-49c1-89f1-f5f1a2c4646e service nova] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Refreshing instance network info cache due to event network-changed-1daae92e-1898-467c-be43-e8f27bff4242. {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 912.789633] env[69367]: DEBUG oslo_concurrency.lockutils [req-c596c4e2-c671-4b34-8866-63fccfb79919 req-28669e9e-c89b-49c1-89f1-f5f1a2c4646e service nova] Acquiring lock "refresh_cache-42db60d9-e5f7-4925-8f6f-d3884687414a" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.789899] env[69367]: DEBUG oslo_concurrency.lockutils [req-c596c4e2-c671-4b34-8866-63fccfb79919 req-28669e9e-c89b-49c1-89f1-f5f1a2c4646e service nova] Acquired lock "refresh_cache-42db60d9-e5f7-4925-8f6f-d3884687414a" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.790095] env[69367]: DEBUG nova.network.neutron [req-c596c4e2-c671-4b34-8866-63fccfb79919 req-28669e9e-c89b-49c1-89f1-f5f1a2c4646e service nova] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Refreshing network info cache for port 1daae92e-1898-467c-be43-e8f27bff4242 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 913.027152] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbb63646-795d-4cf9-8b70-3ee5b3c4cdae {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.036085] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-603b8e76-3e1a-4ae0-9b3a-0d5bef927038 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.072671] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8afd4da-85d2-4bf6-bacc-7a6e295f136c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.084912] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7476c0bc-58f1-4477-a9d7-a51d8307d5d8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.101423] env[69367]: DEBUG nova.compute.provider_tree [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 913.138472] env[69367]: INFO nova.scheduler.client.report [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Deleted allocations for instance 1b57dbcb-527e-4142-8dbf-5622978a7c02 [ 913.226400] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.510407] env[69367]: DEBUG oslo_concurrency.lockutils [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.633614] env[69367]: ERROR nova.scheduler.client.report [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [req-436f65f9-ff5c-4cd9-b597-cc809a9cb523] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-436f65f9-ff5c-4cd9-b597-cc809a9cb523"}]} [ 913.633614] env[69367]: DEBUG oslo_concurrency.lockutils [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.145s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.633614] env[69367]: ERROR nova.compute.manager [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 913.633614] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] Traceback (most recent call last): [ 913.633614] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 913.633614] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] yield [ 913.633614] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 913.633614] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] self.set_inventory_for_provider( [ 913.633614] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 913.633614] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 913.633614] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-436f65f9-ff5c-4cd9-b597-cc809a9cb523"}]} [ 913.633614] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] [ 913.633614] env[69367]: ERROR nova.compute.manager 
[instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] During handling of the above exception, another exception occurred: [ 913.633614] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] [ 913.633614] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] Traceback (most recent call last): [ 913.633614] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 913.633614] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] with self.rt.instance_claim(context, instance, node, allocs, [ 913.633614] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 913.633614] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] return f(*args, **kwargs) [ 913.633614] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 913.633614] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] self._update(elevated, cn) [ 913.633614] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 913.633614] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] self._update_to_placement(context, compute_node, startup) [ 913.633614] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 913.633614] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 913.634491] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 913.634491] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] return attempt.get(self._wrap_exception) [ 913.634491] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 913.634491] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] six.reraise(self.value[0], self.value[1], self.value[2]) [ 913.634491] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 913.634491] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] raise value [ 913.634491] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 913.634491] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 913.634491] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 
1390, in _update_to_placement [ 913.634491] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] self.reportclient.update_from_provider_tree( [ 913.634491] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 913.634491] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] with catch_all(pd.uuid): [ 913.634491] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 913.634491] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] self.gen.throw(typ, value, traceback) [ 913.634491] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 913.634491] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] raise exception.ResourceProviderSyncFailed() [ 913.634491] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 913.634491] env[69367]: ERROR nova.compute.manager [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] [ 913.635491] env[69367]: DEBUG nova.compute.utils [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 913.636948] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.026s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.639037] env[69367]: INFO nova.compute.claims [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 913.642495] env[69367]: DEBUG nova.compute.manager [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] Build of instance e2639eea-9e67-45b5-acf0-5b015b4c0a1e was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 913.643401] env[69367]: DEBUG nova.compute.manager [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 913.644060] env[69367]: DEBUG oslo_concurrency.lockutils [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquiring lock "refresh_cache-e2639eea-9e67-45b5-acf0-5b015b4c0a1e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.644519] env[69367]: DEBUG oslo_concurrency.lockutils [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquired lock "refresh_cache-e2639eea-9e67-45b5-acf0-5b015b4c0a1e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 913.644971] env[69367]: DEBUG nova.network.neutron [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 913.651428] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5d288ef0-a540-4075-bd94-d745566f8b08 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "1b57dbcb-527e-4142-8dbf-5622978a7c02" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.943s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.833892] env[69367]: DEBUG nova.compute.manager [req-933c2bd3-1b04-4c57-9cf8-370db3233cf3 req-881a7945-e52f-4415-813f-d8f01cc0c9b0 service nova] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Received event network-vif-deleted-64046d9a-7f38-4310-893f-f0a44a81b191 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 913.834198] env[69367]: INFO nova.compute.manager [req-933c2bd3-1b04-4c57-9cf8-370db3233cf3 req-881a7945-e52f-4415-813f-d8f01cc0c9b0 service nova] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Neutron deleted interface 64046d9a-7f38-4310-893f-f0a44a81b191; detaching it from the instance and deleting it from the info cache [ 913.834625] env[69367]: DEBUG nova.network.neutron [req-933c2bd3-1b04-4c57-9cf8-370db3233cf3 req-881a7945-e52f-4415-813f-d8f01cc0c9b0 service nova] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Updating instance_info_cache with network_info: [{"id": "e8aa72cb-4b5c-4ac1-85d5-60be3f7372b2", "address": "fa:16:3e:7b:44:36", "network": {"id": "5f3745be-c047-4b85-9371-a372c6cd2521", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-78713587", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.148", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": 
{"injected": false, "tenant_id": "68ad9e06b1fb4e5bbad98a14e0c55c60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a92a4ffe-7939-4697-bf98-5b22e2c7feda", "external-id": "nsx-vlan-transportzone-732", "segmentation_id": 732, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8aa72cb-4b", "ovs_interfaceid": "e8aa72cb-4b5c-4ac1-85d5-60be3f7372b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.843496] env[69367]: DEBUG nova.network.neutron [req-c596c4e2-c671-4b34-8866-63fccfb79919 req-28669e9e-c89b-49c1-89f1-f5f1a2c4646e service nova] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Updated VIF entry in instance network info cache for port 1daae92e-1898-467c-be43-e8f27bff4242. {{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 913.844152] env[69367]: DEBUG nova.network.neutron [req-c596c4e2-c671-4b34-8866-63fccfb79919 req-28669e9e-c89b-49c1-89f1-f5f1a2c4646e service nova] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Updating instance_info_cache with network_info: [{"id": "1daae92e-1898-467c-be43-e8f27bff4242", "address": "fa:16:3e:b1:9b:f0", "network": {"id": "a8e5c1a6-2526-4042-8a8d-fdb54dbe7bf9", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1429177141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5bc3d470905412ea72a8eedb98e9e47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1daae92e-18", "ovs_interfaceid": "1daae92e-1898-467c-be43-e8f27bff4242", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.881211] env[69367]: DEBUG nova.network.neutron [-] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.924322] env[69367]: DEBUG oslo_concurrency.lockutils [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.924577] env[69367]: DEBUG oslo_concurrency.lockutils [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 
tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.177441] env[69367]: DEBUG nova.network.neutron [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 914.320113] env[69367]: DEBUG nova.network.neutron [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.342539] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-80f0519a-0fef-4589-9211-50d93bd6162d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.346758] env[69367]: DEBUG oslo_concurrency.lockutils [None req-571148d0-967c-42f0-92e5-f4d3d30b31a6 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquiring lock "95efcff3-a81b-49fb-b85a-dae060c023b2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.346986] env[69367]: DEBUG oslo_concurrency.lockutils [None req-571148d0-967c-42f0-92e5-f4d3d30b31a6 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "95efcff3-a81b-49fb-b85a-dae060c023b2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.347239] env[69367]: DEBUG oslo_concurrency.lockutils [None req-571148d0-967c-42f0-92e5-f4d3d30b31a6 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquiring lock "95efcff3-a81b-49fb-b85a-dae060c023b2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.347462] env[69367]: DEBUG oslo_concurrency.lockutils [None req-571148d0-967c-42f0-92e5-f4d3d30b31a6 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "95efcff3-a81b-49fb-b85a-dae060c023b2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.347611] env[69367]: DEBUG oslo_concurrency.lockutils [None req-571148d0-967c-42f0-92e5-f4d3d30b31a6 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "95efcff3-a81b-49fb-b85a-dae060c023b2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.349851] env[69367]: DEBUG oslo_concurrency.lockutils [req-c596c4e2-c671-4b34-8866-63fccfb79919 req-28669e9e-c89b-49c1-89f1-f5f1a2c4646e service nova] Releasing lock "refresh_cache-42db60d9-e5f7-4925-8f6f-d3884687414a" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 914.351602] env[69367]: INFO nova.compute.manager [None req-571148d0-967c-42f0-92e5-f4d3d30b31a6 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Terminating instance [ 914.355943] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84997618-bc70-46fa-baa7-8072365e898c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.387851] env[69367]: INFO nova.compute.manager [-] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Took 1.86 seconds to deallocate network for instance. [ 914.388343] env[69367]: DEBUG nova.compute.manager [req-933c2bd3-1b04-4c57-9cf8-370db3233cf3 req-881a7945-e52f-4415-813f-d8f01cc0c9b0 service nova] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Detach interface failed, port_id=64046d9a-7f38-4310-893f-f0a44a81b191, reason: Instance 652e2e23-7927-46ce-b8af-fffdb6ac8a3e could not be found. {{(pid=69367) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11601}} [ 914.388517] env[69367]: DEBUG nova.compute.manager [req-933c2bd3-1b04-4c57-9cf8-370db3233cf3 req-881a7945-e52f-4415-813f-d8f01cc0c9b0 service nova] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Received event network-vif-deleted-e8aa72cb-4b5c-4ac1-85d5-60be3f7372b2 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 914.388637] env[69367]: INFO nova.compute.manager [req-933c2bd3-1b04-4c57-9cf8-370db3233cf3 req-881a7945-e52f-4415-813f-d8f01cc0c9b0 service nova] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Neutron deleted interface e8aa72cb-4b5c-4ac1-85d5-60be3f7372b2; detaching it from the instance and deleting it from the info cache [ 914.388811] env[69367]: DEBUG nova.network.neutron [req-933c2bd3-1b04-4c57-9cf8-370db3233cf3 req-881a7945-e52f-4415-813f-d8f01cc0c9b0 service nova] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.426935] env[69367]: DEBUG nova.compute.manager [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 914.679098] env[69367]: DEBUG nova.scheduler.client.report [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 914.694327] env[69367]: DEBUG nova.scheduler.client.report [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 914.694618] env[69367]: DEBUG nova.compute.provider_tree [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 914.707783] env[69367]: DEBUG nova.scheduler.client.report [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 914.735363] env[69367]: DEBUG nova.scheduler.client.report [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 914.824028] env[69367]: DEBUG oslo_concurrency.lockutils [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Releasing lock "refresh_cache-e2639eea-9e67-45b5-acf0-5b015b4c0a1e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 914.824028] env[69367]: DEBUG nova.compute.manager [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 
tempest-AttachVolumeNegativeTest-2041104029-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 914.824187] env[69367]: DEBUG nova.compute.manager [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 914.824338] env[69367]: DEBUG nova.network.neutron [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 914.842686] env[69367]: DEBUG nova.network.neutron [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 914.870526] env[69367]: DEBUG oslo_concurrency.lockutils [None req-571148d0-967c-42f0-92e5-f4d3d30b31a6 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquiring lock "refresh_cache-95efcff3-a81b-49fb-b85a-dae060c023b2" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.870638] env[69367]: DEBUG oslo_concurrency.lockutils [None req-571148d0-967c-42f0-92e5-f4d3d30b31a6 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquired lock "refresh_cache-95efcff3-a81b-49fb-b85a-dae060c023b2" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.870741] env[69367]: DEBUG nova.network.neutron [None req-571148d0-967c-42f0-92e5-f4d3d30b31a6 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 914.895551] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-59f4c4c4-493f-4ee4-9499-63cb8a8c385c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.898753] env[69367]: DEBUG oslo_concurrency.lockutils [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.907319] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da98174a-047d-44a0-8eb3-3e064544f2f0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.942031] env[69367]: DEBUG nova.compute.manager [req-933c2bd3-1b04-4c57-9cf8-370db3233cf3 req-881a7945-e52f-4415-813f-d8f01cc0c9b0 service nova] [instance: 
652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Detach interface failed, port_id=e8aa72cb-4b5c-4ac1-85d5-60be3f7372b2, reason: Instance 652e2e23-7927-46ce-b8af-fffdb6ac8a3e could not be found. {{(pid=69367) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11601}} [ 914.954740] env[69367]: DEBUG oslo_concurrency.lockutils [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 915.070564] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-730117a9-b182-4d0f-a516-488b6b8441b9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.078711] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5a16dc6-86e2-46b2-a865-9d3c71d2b76f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.110737] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b033bc47-b7bf-4f73-bb2e-3d3ef168eff6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.119129] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11c9e9b1-442a-4118-bc4a-69b2c14a1a7c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.133211] env[69367]: DEBUG nova.compute.provider_tree [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 915.345192] env[69367]: DEBUG nova.network.neutron [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.389939] env[69367]: DEBUG nova.network.neutron [None req-571148d0-967c-42f0-92e5-f4d3d30b31a6 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 915.468988] env[69367]: DEBUG nova.network.neutron [None req-571148d0-967c-42f0-92e5-f4d3d30b31a6 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.655686] env[69367]: ERROR nova.scheduler.client.report [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] [req-21fb3ff7-ec4f-4fab-946b-3a9621d24d8a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-21fb3ff7-ec4f-4fab-946b-3a9621d24d8a"}]} [ 915.656075] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.019s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 915.656682] env[69367]: ERROR nova.compute.manager [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] Traceback (most recent call last): [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] yield [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] self.set_inventory_for_provider( [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-21fb3ff7-ec4f-4fab-946b-3a9621d24d8a"}]} [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] During handling of the above exception, another exception occurred: [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] Traceback (most recent call last): [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] with self.rt.instance_claim(context, instance, node, allocs, [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] return f(*args, **kwargs) [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] self._update(elevated, cn) [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: 
b2c2b5d8-70ae-4fda-9926-c673be42569b] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] self._update_to_placement(context, compute_node, startup) [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] return attempt.get(self._wrap_exception) [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] six.reraise(self.value[0], self.value[1], self.value[2]) [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] raise value [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] self.reportclient.update_from_provider_tree( [ 915.656682] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 915.657613] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] with catch_all(pd.uuid): [ 915.657613] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 915.657613] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] self.gen.throw(typ, value, traceback) [ 915.657613] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 915.657613] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] raise exception.ResourceProviderSyncFailed() [ 915.657613] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 915.657613] env[69367]: ERROR nova.compute.manager [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] [ 915.657613] env[69367]: DEBUG nova.compute.utils [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 915.659059] env[69367]: DEBUG oslo_concurrency.lockutils [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.303s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 915.660695] env[69367]: INFO nova.compute.claims [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 915.663736] env[69367]: DEBUG nova.compute.manager [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] Build of instance b2c2b5d8-70ae-4fda-9926-c673be42569b was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 915.664195] env[69367]: DEBUG nova.compute.manager [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 915.664434] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] Acquiring lock "refresh_cache-b2c2b5d8-70ae-4fda-9926-c673be42569b" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.664567] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] Acquired lock "refresh_cache-b2c2b5d8-70ae-4fda-9926-c673be42569b" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 915.664726] env[69367]: DEBUG nova.network.neutron [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 915.848464] env[69367]: INFO nova.compute.manager [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 
tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: e2639eea-9e67-45b5-acf0-5b015b4c0a1e] Took 1.02 seconds to deallocate network for instance. [ 915.972066] env[69367]: DEBUG oslo_concurrency.lockutils [None req-571148d0-967c-42f0-92e5-f4d3d30b31a6 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Releasing lock "refresh_cache-95efcff3-a81b-49fb-b85a-dae060c023b2" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 915.972591] env[69367]: DEBUG nova.compute.manager [None req-571148d0-967c-42f0-92e5-f4d3d30b31a6 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Start destroying the instance on the hypervisor. {{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 915.972794] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-571148d0-967c-42f0-92e5-f4d3d30b31a6 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 915.973126] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-26c216fd-ddfc-46ff-9f11-e1b36d1d8e72 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.983522] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6de552-b80e-4083-b3ec-f56641493c4a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.011145] env[69367]: WARNING nova.virt.vmwareapi.vmops [None req-571148d0-967c-42f0-92e5-f4d3d30b31a6 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 95efcff3-a81b-49fb-b85a-dae060c023b2 could not be found. [ 916.011355] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-571148d0-967c-42f0-92e5-f4d3d30b31a6 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 916.011596] env[69367]: INFO nova.compute.manager [None req-571148d0-967c-42f0-92e5-f4d3d30b31a6 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 916.011781] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-571148d0-967c-42f0-92e5-f4d3d30b31a6 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 916.012106] env[69367]: DEBUG nova.compute.manager [-] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 916.012161] env[69367]: DEBUG nova.network.neutron [-] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 916.028950] env[69367]: DEBUG nova.network.neutron [-] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 916.191081] env[69367]: DEBUG nova.network.neutron [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 916.276842] env[69367]: DEBUG nova.network.neutron [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.534974] env[69367]: DEBUG nova.network.neutron [-] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.695417] env[69367]: DEBUG nova.scheduler.client.report [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 916.713341] env[69367]: DEBUG nova.scheduler.client.report [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 916.713456] env[69367]: DEBUG nova.compute.provider_tree [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 916.726390] env[69367]: DEBUG nova.scheduler.client.report [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 916.749840] env[69367]: DEBUG nova.scheduler.client.report [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 916.780649] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] Releasing lock "refresh_cache-b2c2b5d8-70ae-4fda-9926-c673be42569b" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 916.780883] env[69367]: DEBUG nova.compute.manager [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 916.781081] env[69367]: DEBUG nova.compute.manager [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 916.781257] env[69367]: DEBUG nova.network.neutron [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 916.821702] env[69367]: DEBUG nova.network.neutron [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 916.877895] env[69367]: INFO nova.scheduler.client.report [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Deleted allocations for instance e2639eea-9e67-45b5-acf0-5b015b4c0a1e [ 917.037915] env[69367]: INFO nova.compute.manager [-] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Took 1.03 seconds to deallocate network for instance. 
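Editor's note: the 400 responses from placement in the entries above are a JSON-Schema validation failure, not a transport problem. The inventory the resource tracker computes locally reports DISK_GB with max_unit 0 (the ProviderTree update at 915.133211 and the error at 915.655686), while the schema fragment quoted in the error detail requires an integer between 1 and 2147483647; the copy refreshed back from placement (916.713341) still carries the last accepted value, max_unit 1. A minimal sketch reproducing the quoted message, assuming only the jsonschema package (whose "Failed validating 'minimum'" wording appears verbatim in the error detail):

```python
# Minimal sketch, assuming the jsonschema package; the schema fragment and the
# offending value are copied from the placement error detail above.
import jsonschema

max_unit_schema = {'type': 'integer', 'maximum': 2147483647, 'minimum': 1}

try:
    # DISK_GB max_unit from the rejected inventory payload.
    jsonschema.validate(0, max_unit_schema)
except jsonschema.exceptions.ValidationError as exc:
    print(exc.message)  # -> "0 is less than the minimum of 1"
```

Because the same payload fails validation on every attempt, the retry wrapper visible in the traceback frames (retrying.py wrapped_f -> Retrying.call -> six.reraise) ends up re-raising the failure, the instance claim aborts with ResourceProviderSyncFailed, and the build is re-scheduled (915.663736).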
[ 917.217307] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a428ba99-633c-4cb3-b5d0-8f5d7bffb8e6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.225632] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff013d20-6ee0-4714-baf5-2d6bf4d04e30 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.255926] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bc2669a-6b97-40e7-a802-40ebd03e9243 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.264805] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce82a300-32d5-489d-98e7-efb9bdbde547 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.281510] env[69367]: DEBUG nova.compute.provider_tree [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 917.324105] env[69367]: DEBUG nova.network.neutron [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.391832] env[69367]: DEBUG oslo_concurrency.lockutils [None req-58e3c20d-1527-4eb1-809c-3e88b2533207 tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "e2639eea-9e67-45b5-acf0-5b015b4c0a1e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.514s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.547931] env[69367]: DEBUG oslo_concurrency.lockutils [None req-571148d0-967c-42f0-92e5-f4d3d30b31a6 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 917.804173] env[69367]: ERROR nova.scheduler.client.report [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [req-36bd955c-6e29-4503-a42e-aacf963ac75a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-36bd955c-6e29-4503-a42e-aacf963ac75a"}]} [ 917.804632] env[69367]: DEBUG oslo_concurrency.lockutils [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.146s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.805439] env[69367]: ERROR nova.compute.manager [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] Traceback (most recent call last): [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] yield [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] self.set_inventory_for_provider( [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On 
instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-36bd955c-6e29-4503-a42e-aacf963ac75a"}]} [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] During handling of the above exception, another exception occurred: [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] Traceback (most recent call last): [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] with self.rt.instance_claim(context, instance, node, allocs, [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] return f(*args, **kwargs) [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] self._update(elevated, cn) [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] self._update_to_placement(context, compute_node, startup) [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] return attempt.get(self._wrap_exception) [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] six.reraise(self.value[0], self.value[1], self.value[2]) [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] raise value [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 917.805439] env[69367]: 
ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] self.reportclient.update_from_provider_tree( [ 917.805439] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 917.807142] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] with catch_all(pd.uuid): [ 917.807142] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 917.807142] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] self.gen.throw(typ, value, traceback) [ 917.807142] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 917.807142] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] raise exception.ResourceProviderSyncFailed() [ 917.807142] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 917.807142] env[69367]: ERROR nova.compute.manager [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] [ 917.807142] env[69367]: DEBUG nova.compute.utils [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 917.807854] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.216s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 917.808158] env[69367]: DEBUG oslo_concurrency.lockutils [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.808718] env[69367]: INFO nova.compute.manager [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] [instance: d900df05-b65c-4a45-94d1-563afbf9c022] Successfully reverted task state from None on failure for instance. 
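Editor's note: the same inventory rejection also breaks the delete path. The oslo_messaging.rpc.server traceback a few entries below (917.815407) shows terminate_instance -> _delete_instance -> _complete_deletion -> update_usage -> _update_to_placement failing with ResourceProviderSyncFailed, which is why the task state for instance d900df05-b65c-4a45-94d1-563afbf9c022 is reverted above; while the reported DISK_GB max_unit stays at 0, this node can neither claim nor release resources in placement. Purely as an illustration of the constraint (an assumption for this note, not Nova's actual code), a guard that keeps a reported inventory inside the schema bounds quoted in the errors would look like:

```python
# Illustrative guard only, not Nova's code: force every reported max_unit into
# the range required by the placement schema quoted in the errors above
# (1 <= max_unit <= 2147483647).
MAX_INT32 = 2147483647

def clamp_max_unit(inventories: dict) -> dict:
    """Return the inventories with each max_unit forced into [1, MAX_INT32]."""
    for inv in inventories.values():
        inv['max_unit'] = min(max(inv.get('max_unit', 1), 1), MAX_INT32)
    return inventories

rejected = {'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1,
                        'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}
print(clamp_max_unit(rejected)['DISK_GB']['max_unit'])  # -> 1
```

Whether clamping or correcting the underlying capacity report is the right remedy is outside what this log shows; the sketch only restates the schema constraint that both the build and the delete failures trip over.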
[ 917.810807] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.585s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 917.811094] env[69367]: DEBUG nova.objects.instance [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lazy-loading 'resources' on Instance uuid f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 917.812310] env[69367]: DEBUG nova.compute.manager [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] Build of instance dd8a6c15-b61f-43bd-97e3-bf67853594b5 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 917.812795] env[69367]: DEBUG nova.compute.manager [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 917.813125] env[69367]: DEBUG oslo_concurrency.lockutils [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Acquiring lock "refresh_cache-dd8a6c15-b61f-43bd-97e3-bf67853594b5" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.813374] env[69367]: DEBUG oslo_concurrency.lockutils [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Acquired lock "refresh_cache-dd8a6c15-b61f-43bd-97e3-bf67853594b5" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 917.813629] env[69367]: DEBUG nova.network.neutron [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server [None req-9920216c-2ae6-4fb7-81d0-fbadfd2aa9c5 tempest-ServersNegativeTestJSON-19801240 tempest-ServersNegativeTestJSON-19801240-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server yield [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-86fef5a8-a297-4ce6-b89e-dfb08cc23934"}]} [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 917.815407] env[69367]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 
917.817601] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server raise value [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server File 
"/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 917.817601] env[69367]: ERROR oslo_messaging.rpc.server [ 917.827518] env[69367]: INFO nova.compute.manager [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] [instance: b2c2b5d8-70ae-4fda-9926-c673be42569b] Took 1.05 seconds to deallocate network for instance. [ 918.336033] env[69367]: DEBUG nova.scheduler.client.report [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 918.338819] env[69367]: DEBUG nova.network.neutron [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 918.351720] env[69367]: DEBUG nova.scheduler.client.report [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 918.351963] env[69367]: DEBUG nova.compute.provider_tree [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 918.364888] env[69367]: DEBUG nova.scheduler.client.report [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:831}} [ 918.385992] env[69367]: DEBUG nova.scheduler.client.report [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 918.455502] env[69367]: DEBUG nova.network.neutron [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.751802] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b939df7c-5264-4794-803d-51f8e869df86 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.760253] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d303a4c3-08e4-401a-a646-af55aaca435d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.794029] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b9c030f-b31f-45e8-824d-d228e9f1fdc3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.803392] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8afaaf1-5b1b-430d-8260-32f8df2c2c34 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.822047] env[69367]: DEBUG nova.compute.provider_tree [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 918.864149] env[69367]: INFO nova.scheduler.client.report [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] Deleted allocations for instance b2c2b5d8-70ae-4fda-9926-c673be42569b [ 918.960308] env[69367]: DEBUG oslo_concurrency.lockutils [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Releasing lock "refresh_cache-dd8a6c15-b61f-43bd-97e3-bf67853594b5" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 918.960616] env[69367]: DEBUG nova.compute.manager [None req-76469027-b21c-4a45-a033-84e9fe63214f 
tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 918.960753] env[69367]: DEBUG nova.compute.manager [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 918.960920] env[69367]: DEBUG nova.network.neutron [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 918.990385] env[69367]: DEBUG nova.network.neutron [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 919.344711] env[69367]: ERROR nova.scheduler.client.report [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [req-12753221-d11a-4309-83f7-22333b1bc58b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-12753221-d11a-4309-83f7-22333b1bc58b"}]} [ 919.345138] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.534s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.346179] env[69367]: ERROR nova.compute.manager [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
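The 400 body quoted in the tracebacks above is placement's JSON-Schema validation of the PUT /resource_providers/{uuid}/inventories payload: the compute node reports DISK_GB with max_unit of 0, but the schema fragment it cites requires max_unit to be an integer between 1 and 2147483647. The following is a minimal, hypothetical reproduction of that check with the jsonschema package; the schema fragment and the DISK_GB payload are copied from the log, while the real placement schema validates more fields.

import jsonschema

# Minimal fragment of the inventory schema quoted in the 400 above; the real
# placement schema also constrains total, reserved, min_unit, step_size, etc.
INVENTORY_SCHEMA = {
    "type": "object",
    "properties": {
        "inventories": {
            "type": "object",
            "patternProperties": {
                "^[A-Z0-9_]+$": {
                    "type": "object",
                    "properties": {
                        "max_unit": {
                            "type": "integer",
                            "minimum": 1,
                            "maximum": 2147483647,
                        },
                    },
                },
            },
        },
    },
}

# DISK_GB inventory exactly as the compute node reported it: max_unit is 0.
payload = {
    "inventories": {
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1,
                    "max_unit": 0, "step_size": 1, "allocation_ratio": 1.0},
    },
}

try:
    jsonschema.validate(payload, INVENTORY_SCHEMA)
except jsonschema.ValidationError as exc:
    # Prints "0 is less than the minimum of 1" -- the same message placement
    # wraps into its "JSON does not validate" error detail.
    print(exc.message)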
[ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Traceback (most recent call last): [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] yield [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] self.set_inventory_for_provider( [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-12753221-d11a-4309-83f7-22333b1bc58b"}]} [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] During handling of the above exception, another exception occurred: [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Traceback (most recent call last): [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] self._delete_instance(context, instance, bdms) [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] self._complete_deletion(context, instance) [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] self._update_resource_tracker(context, instance) [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: 
f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] self.rt.update_usage(context, instance, instance.node) [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] return f(*args, **kwargs) [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] self._update(context.elevated(), self.compute_nodes[nodename]) [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] self._update_to_placement(context, compute_node, startup) [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] return attempt.get(self._wrap_exception) [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] six.reraise(self.value[0], self.value[1], self.value[2]) [ 919.346179] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 919.347102] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] raise value [ 919.347102] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 919.347102] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 919.347102] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 919.347102] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] self.reportclient.update_from_provider_tree( [ 919.347102] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 919.347102] 
env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] with catch_all(pd.uuid): [ 919.347102] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 919.347102] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] self.gen.throw(typ, value, traceback) [ 919.347102] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 919.347102] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] raise exception.ResourceProviderSyncFailed() [ 919.347102] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 919.347102] env[69367]: ERROR nova.compute.manager [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] [ 919.350192] env[69367]: DEBUG oslo_concurrency.lockutils [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.840s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 919.350958] env[69367]: DEBUG oslo_concurrency.lockutils [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.351205] env[69367]: INFO nova.compute.manager [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Successfully reverted task state from None on failure for instance. [ 919.354642] env[69367]: DEBUG oslo_concurrency.lockutils [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.456s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 919.354911] env[69367]: DEBUG nova.objects.instance [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lazy-loading 'resources' on Instance uuid 652e2e23-7927-46ce-b8af-fffdb6ac8a3e {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server [None req-df9d3f41-569a-4508-a654-1d82f08ee23b tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
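The retrying.py and six.reraise frames in these tracebacks show that ResourceTracker._update_to_placement is wrapped by the retrying library, so the ResourceProviderUpdateFailed only surfaces after the retry budget is exhausted and the last exception is re-raised. A hedged sketch of that decorator pattern follows; the attempt count, wait time, and FakeSyncError are illustrative stand-ins, not Nova's actual settings or exception classes.

from retrying import retry


class FakeSyncError(Exception):
    """Stand-in for the placement 400 seen repeatedly in the log."""


@retry(stop_max_attempt_number=3, wait_fixed=1000)
def update_to_placement():
    # Each attempt performs the inventory update; here it always fails,
    # mirroring the repeated 400 responses in the tracebacks above.
    raise FakeSyncError("placement rejected the inventory payload")


try:
    update_to_placement()
except FakeSyncError as exc:
    # Once attempts are exhausted, retrying re-raises the last exception,
    # which corresponds to the six.reraise frames in the traceback.
    print(exc)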
[ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server yield [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-abe0ede3-4198-4d99-b4ea-8007eb0260c7"}]} [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 919.358261] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 
919.359468] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server raise value [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server File 
"/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 919.359468] env[69367]: ERROR oslo_messaging.rpc.server [ 919.374142] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bcce9e51-1784-4f95-8f13-28eea1be51a8 tempest-InstanceActionsNegativeTestJSON-92931964 tempest-InstanceActionsNegativeTestJSON-92931964-project-member] Lock "b2c2b5d8-70ae-4fda-9926-c673be42569b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.794s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.490426] env[69367]: DEBUG nova.network.neutron [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.864434] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.118s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.892603] env[69367]: DEBUG nova.scheduler.client.report [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 919.918317] env[69367]: DEBUG nova.scheduler.client.report [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 919.918622] env[69367]: DEBUG nova.compute.provider_tree [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 
4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 919.942339] env[69367]: DEBUG nova.scheduler.client.report [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 919.996484] env[69367]: INFO nova.compute.manager [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: dd8a6c15-b61f-43bd-97e3-bf67853594b5] Took 1.03 seconds to deallocate network for instance. [ 920.068213] env[69367]: DEBUG nova.scheduler.client.report [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 920.490467] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32d305b0-52f9-43d9-99b5-6be4a074651d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.500012] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de3cea1-9f16-43fb-89a1-83479484e416 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.537920] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f2b2828-5eb8-4214-867d-66d68d169593 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.547028] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f5e138-608a-4cfb-8b28-54e74fa03dbd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.563446] env[69367]: DEBUG nova.compute.provider_tree [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 921.036481] env[69367]: INFO nova.scheduler.client.report [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 
tempest-ServerDiskConfigTestJSON-2118303032-project-member] Deleted allocations for instance dd8a6c15-b61f-43bd-97e3-bf67853594b5 [ 921.092644] env[69367]: ERROR nova.scheduler.client.report [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [req-28bd03b6-6b3e-4b71-9c8f-61d14f3af9d6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-28bd03b6-6b3e-4b71-9c8f-61d14f3af9d6"}]} [ 921.093343] env[69367]: DEBUG oslo_concurrency.lockutils [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.738s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.093817] env[69367]: ERROR nova.compute.manager [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
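Because every PUT of the new inventory is rejected, placement keeps serving the last accepted generation, which is why the refresh a few entries above still reports DISK_GB max_unit of 1 while the compute node keeps computing 0. One way to confirm what placement currently holds for provider 19ddf8be-7305-4f70-8366-52a9957232e6 is a direct GET against the same /resource_providers/{uuid}/inventories URL the tracebacks reference. The sketch below uses the requests library; the endpoint URL, token, and pinned microversion are placeholders and assumptions for this deployment.

import requests

# Placeholders -- substitute the real placement endpoint and a valid
# Keystone token for this environment.
PLACEMENT = "http://placement.example.test/placement"
TOKEN = "<keystone-token>"
PROVIDER = "19ddf8be-7305-4f70-8366-52a9957232e6"

resp = requests.get(
    f"{PLACEMENT}/resource_providers/{PROVIDER}/inventories",
    headers={
        "X-Auth-Token": TOKEN,
        # Pinning a microversion is optional; 1.26 here is an assumption.
        "OpenStack-API-Version": "placement 1.26",
        "Accept": "application/json",
    },
)
resp.raise_for_status()

# The body carries the provider generation plus one entry per resource class;
# DISK_GB should still show the last max_unit value that passed validation.
for rc, inv in resp.json()["inventories"].items():
    print(rc, inv["max_unit"])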
[ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Traceback (most recent call last): [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] yield [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] self.set_inventory_for_provider( [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-28bd03b6-6b3e-4b71-9c8f-61d14f3af9d6"}]} [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] During handling of the above exception, another exception occurred: [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Traceback (most recent call last): [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] self._delete_instance(context, instance, bdms) [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] self._complete_deletion(context, instance) [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] self._update_resource_tracker(context, instance) [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 
652e2e23-7927-46ce-b8af-fffdb6ac8a3e] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] self.rt.update_usage(context, instance, instance.node) [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] return f(*args, **kwargs) [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] self._update(context.elevated(), self.compute_nodes[nodename]) [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] self._update_to_placement(context, compute_node, startup) [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] return attempt.get(self._wrap_exception) [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] six.reraise(self.value[0], self.value[1], self.value[2]) [ 921.093817] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 921.094901] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] raise value [ 921.094901] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 921.094901] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 921.094901] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 921.094901] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] self.reportclient.update_from_provider_tree( [ 921.094901] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 921.094901] 
env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] with catch_all(pd.uuid): [ 921.094901] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 921.094901] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] self.gen.throw(typ, value, traceback) [ 921.094901] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 921.094901] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] raise exception.ResourceProviderSyncFailed() [ 921.094901] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 921.094901] env[69367]: ERROR nova.compute.manager [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] [ 921.096408] env[69367]: DEBUG oslo_concurrency.lockutils [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.142s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.097877] env[69367]: INFO nova.compute.claims [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 921.387619] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 921.545054] env[69367]: DEBUG oslo_concurrency.lockutils [None req-76469027-b21c-4a45-a033-84e9fe63214f tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Lock "dd8a6c15-b61f-43bd-97e3-bf67853594b5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.217s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.588849] env[69367]: DEBUG oslo_concurrency.lockutils [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "882f1751-5f90-43f8-92d6-c174a6aad09b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 921.588849] env[69367]: DEBUG oslo_concurrency.lockutils [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "882f1751-5f90-43f8-92d6-c174a6aad09b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s 
{{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.604038] env[69367]: DEBUG oslo_concurrency.lockutils [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "652e2e23-7927-46ce-b8af-fffdb6ac8a3e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.761s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 922.094158] env[69367]: DEBUG nova.compute.manager [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 922.149037] env[69367]: DEBUG nova.scheduler.client.report [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 922.170861] env[69367]: DEBUG nova.scheduler.client.report [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 922.171337] env[69367]: DEBUG nova.compute.provider_tree [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 922.175098] env[69367]: DEBUG oslo_concurrency.lockutils [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] Acquiring lock "a1714871-0888-4957-9f26-18ddc4b73ecd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.178020] env[69367]: DEBUG oslo_concurrency.lockutils [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] Lock "a1714871-0888-4957-9f26-18ddc4b73ecd" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.190413] env[69367]: DEBUG nova.scheduler.client.report [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 922.214383] env[69367]: DEBUG nova.scheduler.client.report [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 922.251796] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8546bb22-6407-4505-bd4b-0996daabbd6b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquiring lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.251960] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8546bb22-6407-4505-bd4b-0996daabbd6b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.253116] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8546bb22-6407-4505-bd4b-0996daabbd6b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquiring lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.253116] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8546bb22-6407-4505-bd4b-0996daabbd6b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.253116] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8546bb22-6407-4505-bd4b-0996daabbd6b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 922.258250] env[69367]: INFO nova.compute.manager 
[None req-8546bb22-6407-4505-bd4b-0996daabbd6b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Terminating instance [ 922.618875] env[69367]: DEBUG oslo_concurrency.lockutils [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.641100] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be59fd73-02d0-4f0f-88fd-080dc82550df {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.652259] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac242c7e-66d7-4dda-ae47-15b6f5f63c26 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.684688] env[69367]: DEBUG nova.compute.manager [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 922.688050] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ebeef86-edf4-4306-a8aa-8d9252b67c38 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.696694] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf9f9dfa-6e36-4c94-ab3b-7cf6e1d248bc {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.714576] env[69367]: DEBUG nova.compute.provider_tree [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 922.766205] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8546bb22-6407-4505-bd4b-0996daabbd6b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquiring lock "refresh_cache-d2f8328d-fd05-4e63-9cbd-a6e3ec948964" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.766425] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8546bb22-6407-4505-bd4b-0996daabbd6b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquired lock "refresh_cache-d2f8328d-fd05-4e63-9cbd-a6e3ec948964" {{(pid=69367) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 922.766618] env[69367]: DEBUG nova.network.neutron [None req-8546bb22-6407-4505-bd4b-0996daabbd6b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 922.914836] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c1478397-0ed0-419b-94b5-970c8f06f7f3 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquiring lock "652e2e23-7927-46ce-b8af-fffdb6ac8a3e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.914905] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c1478397-0ed0-419b-94b5-970c8f06f7f3 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "652e2e23-7927-46ce-b8af-fffdb6ac8a3e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.915539] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c1478397-0ed0-419b-94b5-970c8f06f7f3 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquiring lock "652e2e23-7927-46ce-b8af-fffdb6ac8a3e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.916449] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c1478397-0ed0-419b-94b5-970c8f06f7f3 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "652e2e23-7927-46ce-b8af-fffdb6ac8a3e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.916659] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c1478397-0ed0-419b-94b5-970c8f06f7f3 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "652e2e23-7927-46ce-b8af-fffdb6ac8a3e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 922.919546] env[69367]: INFO nova.compute.manager [None req-c1478397-0ed0-419b-94b5-970c8f06f7f3 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Terminating instance [ 923.119737] env[69367]: DEBUG oslo_concurrency.lockutils [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 923.211716] env[69367]: DEBUG oslo_concurrency.lockutils [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 
tempest-ServerDiskConfigTestJSON-2118303032-project-member] Acquiring lock "46237e91-87ba-4b91-af8c-84d8dde87508" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 923.212112] env[69367]: DEBUG oslo_concurrency.lockutils [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Lock "46237e91-87ba-4b91-af8c-84d8dde87508" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.214890] env[69367]: DEBUG oslo_concurrency.lockutils [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 923.242393] env[69367]: ERROR nova.scheduler.client.report [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [req-d8e584e0-3657-492c-a44d-2b4d790b40fb] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-d8e584e0-3657-492c-a44d-2b4d790b40fb"}]} [ 923.242788] env[69367]: DEBUG oslo_concurrency.lockutils [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.147s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.243488] env[69367]: ERROR nova.compute.manager [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] Traceback (most recent call last): [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] yield [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] self.set_inventory_for_provider( [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-d8e584e0-3657-492c-a44d-2b4d790b40fb"}]} [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] During handling of the above exception, another exception occurred: [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] Traceback (most recent call last): [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] with self.rt.instance_claim(context, instance, node, allocs, [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] return f(*args, **kwargs) [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] self._update(elevated, cn) [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 
31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] self._update_to_placement(context, compute_node, startup) [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] return attempt.get(self._wrap_exception) [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] six.reraise(self.value[0], self.value[1], self.value[2]) [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] raise value [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] self.reportclient.update_from_provider_tree( [ 923.243488] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 923.244526] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] with catch_all(pd.uuid): [ 923.244526] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 923.244526] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] self.gen.throw(typ, value, traceback) [ 923.244526] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 923.244526] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] raise exception.ResourceProviderSyncFailed() [ 923.244526] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 923.244526] env[69367]: ERROR nova.compute.manager [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] [ 923.244526] env[69367]: DEBUG nova.compute.utils [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 923.246336] env[69367]: DEBUG oslo_concurrency.lockutils [None req-571148d0-967c-42f0-92e5-f4d3d30b31a6 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.699s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.246569] env[69367]: DEBUG oslo_concurrency.lockutils [None req-571148d0-967c-42f0-92e5-f4d3d30b31a6 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.249120] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.862s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.249334] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.249687] env[69367]: INFO nova.compute.manager [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Successfully reverted task state from None on failure for instance. 
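[editor's note] The repeated 400 responses above all come from the same check: the inventory payload Nova PUTs to placement carries DISK_GB max_unit = 0, and the placement API's JSON schema requires max_unit >= 1. The short sketch below is not Nova or Placement code; it only reproduces that validation step with the python `jsonschema` library, using the schema fragment and values quoted verbatim in the error detail ("0 is less than the minimum of 1"), so the exact library and field names beyond that quote are assumptions for illustration.

    # Minimal sketch, assuming jsonschema-style validation of the inventory payload.
    # Schema fragment and DISK_GB values are taken from the 400 error body in the log.
    import jsonschema

    INVENTORY_SCHEMA = {
        "type": "object",
        "properties": {
            "inventories": {
                "type": "object",
                "patternProperties": {
                    "^[A-Z0-9_]+$": {
                        "type": "object",
                        "properties": {
                            "total": {"type": "integer"},
                            # The constraint that DISK_GB max_unit=0 violates:
                            "max_unit": {
                                "type": "integer",
                                "maximum": 2147483647,
                                "minimum": 1,
                            },
                        },
                    },
                },
            },
        },
    }

    # Mirrors the DISK_GB record the compute node reported (max_unit computed as 0).
    payload = {"inventories": {"DISK_GB": {"total": 400, "max_unit": 0}}}

    try:
        jsonschema.validate(payload, INVENTORY_SCHEMA)
    except jsonschema.ValidationError as exc:
        # Prints "0 is less than the minimum of 1", matching the 400 response body,
        # which Nova then surfaces as ResourceProviderUpdateFailed / ResourceProviderSyncFailed.
        print(exc.message)

[end editor's note]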
[ 923.252695] env[69367]: DEBUG oslo_concurrency.lockutils [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.635s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.254505] env[69367]: INFO nova.compute.claims [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 923.258016] env[69367]: DEBUG nova.compute.manager [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] Build of instance 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 923.258623] env[69367]: DEBUG nova.compute.manager [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 923.259059] env[69367]: DEBUG oslo_concurrency.lockutils [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "refresh_cache-31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.259246] env[69367]: DEBUG oslo_concurrency.lockutils [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquired lock "refresh_cache-31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 923.259446] env[69367]: DEBUG nova.network.neutron [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server [None req-c6dcbadb-7650-4ae1-8fea-d19613654f29 tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server yield [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-12753221-d11a-4309-83f7-22333b1bc58b"}]} [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 923.261757] env[69367]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 
923.263017] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server raise value [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server File 
"/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 923.263017] env[69367]: ERROR oslo_messaging.rpc.server [ 923.287522] env[69367]: INFO nova.scheduler.client.report [None req-571148d0-967c-42f0-92e5-f4d3d30b31a6 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Deleted allocations for instance 95efcff3-a81b-49fb-b85a-dae060c023b2 [ 923.303406] env[69367]: DEBUG nova.network.neutron [None req-8546bb22-6407-4505-bd4b-0996daabbd6b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 923.404991] env[69367]: DEBUG nova.network.neutron [None req-8546bb22-6407-4505-bd4b-0996daabbd6b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.424363] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c1478397-0ed0-419b-94b5-970c8f06f7f3 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquiring lock "refresh_cache-652e2e23-7927-46ce-b8af-fffdb6ac8a3e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.424595] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c1478397-0ed0-419b-94b5-970c8f06f7f3 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquired lock "refresh_cache-652e2e23-7927-46ce-b8af-fffdb6ac8a3e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 923.424781] env[69367]: DEBUG nova.network.neutron [None req-c1478397-0ed0-419b-94b5-970c8f06f7f3 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 923.715099] env[69367]: DEBUG nova.compute.manager [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 923.800366] env[69367]: DEBUG oslo_concurrency.lockutils [None req-571148d0-967c-42f0-92e5-f4d3d30b31a6 tempest-ImagesTestJSON-957163409 tempest-ImagesTestJSON-957163409-project-member] Lock "95efcff3-a81b-49fb-b85a-dae060c023b2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.453s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.909026] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8546bb22-6407-4505-bd4b-0996daabbd6b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Releasing lock "refresh_cache-d2f8328d-fd05-4e63-9cbd-a6e3ec948964" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 923.909491] env[69367]: DEBUG nova.compute.manager [None req-8546bb22-6407-4505-bd4b-0996daabbd6b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Start destroying the instance on the hypervisor. {{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 923.910554] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-8546bb22-6407-4505-bd4b-0996daabbd6b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 923.910554] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-49063c9d-aad8-4763-a38e-52189c5175c7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.920951] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87bf4f1f-08b8-487e-9150-4d18767cf151 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.954921] env[69367]: WARNING nova.virt.vmwareapi.vmops [None req-8546bb22-6407-4505-bd4b-0996daabbd6b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d2f8328d-fd05-4e63-9cbd-a6e3ec948964 could not be found. [ 923.955243] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-8546bb22-6407-4505-bd4b-0996daabbd6b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 923.955431] env[69367]: INFO nova.compute.manager [None req-8546bb22-6407-4505-bd4b-0996daabbd6b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 923.955688] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8546bb22-6407-4505-bd4b-0996daabbd6b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 923.955921] env[69367]: DEBUG nova.compute.manager [-] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 923.956044] env[69367]: DEBUG nova.network.neutron [-] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 923.960507] env[69367]: DEBUG nova.network.neutron [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 923.972489] env[69367]: DEBUG nova.network.neutron [None req-c1478397-0ed0-419b-94b5-970c8f06f7f3 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 923.975739] env[69367]: DEBUG nova.network.neutron [-] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 924.138119] env[69367]: DEBUG nova.network.neutron [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.164987] env[69367]: DEBUG nova.network.neutron [None req-c1478397-0ed0-419b-94b5-970c8f06f7f3 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.247362] env[69367]: DEBUG oslo_concurrency.lockutils [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.298674] env[69367]: DEBUG nova.scheduler.client.report [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 924.334191] env[69367]: DEBUG nova.scheduler.client.report [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 924.335035] env[69367]: DEBUG nova.compute.provider_tree [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 924.354511] env[69367]: DEBUG nova.scheduler.client.report [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 924.375794] 
env[69367]: DEBUG nova.scheduler.client.report [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 924.480388] env[69367]: DEBUG nova.network.neutron [-] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.644025] env[69367]: DEBUG oslo_concurrency.lockutils [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Releasing lock "refresh_cache-31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 924.644300] env[69367]: DEBUG nova.compute.manager [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 924.644495] env[69367]: DEBUG nova.compute.manager [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 924.644695] env[69367]: DEBUG nova.network.neutron [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 924.667732] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c1478397-0ed0-419b-94b5-970c8f06f7f3 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Releasing lock "refresh_cache-652e2e23-7927-46ce-b8af-fffdb6ac8a3e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 924.668209] env[69367]: DEBUG nova.compute.manager [None req-c1478397-0ed0-419b-94b5-970c8f06f7f3 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Start destroying the instance on the hypervisor. 
{{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 924.668407] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-c1478397-0ed0-419b-94b5-970c8f06f7f3 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 924.668718] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cf8ff9dd-cdde-42d4-b3af-06ac07049450 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.671598] env[69367]: DEBUG nova.network.neutron [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 924.688279] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de81b4e-0aa2-4b4d-8acb-fe8d803fc613 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.719649] env[69367]: WARNING nova.virt.vmwareapi.vmops [None req-c1478397-0ed0-419b-94b5-970c8f06f7f3 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 652e2e23-7927-46ce-b8af-fffdb6ac8a3e could not be found. [ 924.719882] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-c1478397-0ed0-419b-94b5-970c8f06f7f3 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 924.720058] env[69367]: INFO nova.compute.manager [None req-c1478397-0ed0-419b-94b5-970c8f06f7f3 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Took 0.05 seconds to destroy the instance on the hypervisor. [ 924.720604] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c1478397-0ed0-419b-94b5-970c8f06f7f3 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 924.720835] env[69367]: DEBUG nova.compute.manager [-] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 924.720937] env[69367]: DEBUG nova.network.neutron [-] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 924.751057] env[69367]: DEBUG nova.network.neutron [-] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 924.802019] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c0e4c6-3fe7-41c6-abfe-169a435bd8a3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.812968] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-951d6d97-3884-4a82-bf7c-866002710f9d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.856633] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d71c620c-828e-4ca8-a413-0d85bbfeeccb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.862299] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d16e25d-3a8d-4374-9f27-276a4bc73779 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.878883] env[69367]: DEBUG nova.compute.provider_tree [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 924.983366] env[69367]: INFO nova.compute.manager [-] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Took 1.03 seconds to deallocate network for instance. [ 925.181209] env[69367]: DEBUG nova.network.neutron [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.253643] env[69367]: DEBUG nova.network.neutron [-] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.410595] env[69367]: ERROR nova.scheduler.client.report [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [req-5653a157-86f9-4698-b968-1ade4b75b446] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-5653a157-86f9-4698-b968-1ade4b75b446"}]} [ 925.411173] env[69367]: DEBUG oslo_concurrency.lockutils [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.159s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.412272] env[69367]: ERROR nova.compute.manager [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] Traceback (most recent call last): [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] yield [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] self.set_inventory_for_provider( [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-5653a157-86f9-4698-b968-1ade4b75b446"}]} [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 
882f1751-5f90-43f8-92d6-c174a6aad09b] During handling of the above exception, another exception occurred: [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] Traceback (most recent call last): [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] with self.rt.instance_claim(context, instance, node, allocs, [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] return f(*args, **kwargs) [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] self._update(elevated, cn) [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] self._update_to_placement(context, compute_node, startup) [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] return attempt.get(self._wrap_exception) [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] six.reraise(self.value[0], self.value[1], self.value[2]) [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] raise value [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in 
_update_to_placement [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] self.reportclient.update_from_provider_tree( [ 925.412272] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 925.413724] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] with catch_all(pd.uuid): [ 925.413724] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 925.413724] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] self.gen.throw(typ, value, traceback) [ 925.413724] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 925.413724] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] raise exception.ResourceProviderSyncFailed() [ 925.413724] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 925.413724] env[69367]: ERROR nova.compute.manager [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] [ 925.413724] env[69367]: DEBUG nova.compute.utils [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 925.415522] env[69367]: DEBUG oslo_concurrency.lockutils [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.296s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 925.415714] env[69367]: DEBUG oslo_concurrency.lockutils [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.415913] env[69367]: INFO nova.compute.manager [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Successfully reverted task state from None on failure for instance. 
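The 400 responses above come from placement's JSON-schema validation of the PUT /resource_providers/{uuid}/inventories payload: the resource tracker on this host keeps reporting a DISK_GB max_unit of 0, but the inventory schema requires max_unit >= 1, so every inventory sync is rejected and both instance_claim and update_usage end in ResourceProviderSyncFailed. Below is a minimal sketch of the failing validation, using the Python jsonschema library, a schema fragment reconstructed only from the error detail quoted in the log (not the full placement schema), and the DISK_GB inventory exactly as logged.

import jsonschema

# Fragment mirroring the constraint quoted in the 400 detail; the real placement
# inventory schema is larger, this only reproduces the max_unit bounds.
SCHEMA = {
    "type": "object",
    "properties": {
        "inventories": {
            "type": "object",
            "patternProperties": {
                "^[A-Z0-9_]+$": {
                    "type": "object",
                    "properties": {
                        "max_unit": {"type": "integer",
                                     "minimum": 1,
                                     "maximum": 2147483647},
                    },
                },
            },
        },
    },
}

# DISK_GB inventory as reported by the resource tracker in this log.
payload = {"inventories": {"DISK_GB": {"total": 400, "reserved": 0,
                                       "min_unit": 1, "max_unit": 0,
                                       "step_size": 1,
                                       "allocation_ratio": 1.0}}}

try:
    jsonschema.validate(payload, SCHEMA)
except jsonschema.ValidationError as err:
    # Prints "0 is less than the minimum of 1" at inventories/DISK_GB/max_unit,
    # matching the "JSON does not validate" detail returned by placement.
    print(err.message, list(err.absolute_path))

Note that the later _refresh_and_get_inventory lines show placement still holding the previously accepted DISK_GB max_unit of 1, while each new sync attempt sends max_unit 0 and fails again; until the host computes a positive DISK_GB max_unit, claims on this node keep failing and the affected builds are re-scheduled.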
[ 925.419724] env[69367]: DEBUG oslo_concurrency.lockutils [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.205s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 925.422582] env[69367]: INFO nova.compute.claims [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 925.426113] env[69367]: DEBUG nova.compute.manager [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] Build of instance 882f1751-5f90-43f8-92d6-c174a6aad09b was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 925.426460] env[69367]: DEBUG nova.compute.manager [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 925.427547] env[69367]: DEBUG oslo_concurrency.lockutils [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "refresh_cache-882f1751-5f90-43f8-92d6-c174a6aad09b" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.427547] env[69367]: DEBUG oslo_concurrency.lockutils [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquired lock "refresh_cache-882f1751-5f90-43f8-92d6-c174a6aad09b" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 925.427547] env[69367]: DEBUG nova.network.neutron [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server [None req-22a6e440-2f28-49a6-9890-9c7ebb48122d tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server yield [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-28bd03b6-6b3e-4b71-9c8f-61d14f3af9d6"}]} [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 925.428854] env[69367]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server raise self.value [ 
925.430199] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server raise value [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server File 
"/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 925.430199] env[69367]: ERROR oslo_messaging.rpc.server [ 925.490104] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8546bb22-6407-4505-bd4b-0996daabbd6b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 925.686485] env[69367]: INFO nova.compute.manager [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba] Took 1.04 seconds to deallocate network for instance. [ 925.764216] env[69367]: INFO nova.compute.manager [-] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Took 1.04 seconds to deallocate network for instance. [ 925.952409] env[69367]: DEBUG nova.network.neutron [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 926.074092] env[69367]: DEBUG nova.network.neutron [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.277691] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c1478397-0ed0-419b-94b5-970c8f06f7f3 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.278634] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] Acquiring lock "64bf5848-e98b-47bb-a61f-7e9afce5bded" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.285859] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] Lock "64bf5848-e98b-47bb-a61f-7e9afce5bded" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 926.459500] env[69367]: DEBUG nova.scheduler.client.report [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 926.474411] env[69367]: DEBUG nova.scheduler.client.report [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 926.474686] env[69367]: DEBUG nova.compute.provider_tree [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 
1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 926.487205] env[69367]: DEBUG nova.scheduler.client.report [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 926.509247] env[69367]: DEBUG nova.scheduler.client.report [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 926.582655] env[69367]: DEBUG oslo_concurrency.lockutils [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Releasing lock "refresh_cache-882f1751-5f90-43f8-92d6-c174a6aad09b" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.582892] env[69367]: DEBUG nova.compute.manager [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 926.583087] env[69367]: DEBUG nova.compute.manager [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 926.583344] env[69367]: DEBUG nova.network.neutron [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 926.601658] env[69367]: DEBUG nova.network.neutron [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 926.720524] env[69367]: INFO nova.scheduler.client.report [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Deleted allocations for instance 31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba [ 926.782311] env[69367]: DEBUG nova.compute.manager [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 926.889908] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837b58a6-b09a-45d4-bcf9-d491bcfe76ca {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.899613] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e28100-0303-496b-a1e2-4e9682544724 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.936777] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be514784-172b-4fc9-b125-761ed6ba64ba {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.945439] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a58a86d-8be8-4f71-97b4-99e059427491 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.960019] env[69367]: DEBUG nova.compute.provider_tree [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 927.104457] env[69367]: DEBUG nova.network.neutron [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.234684] env[69367]: DEBUG oslo_concurrency.lockutils [None req-dbe267e5-e67b-4ed3-9d86-cb6e320e7fb1 tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "31a4eae5-2dc3-4eec-aa93-d73d4f1be5ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.310s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.307034] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.487249] env[69367]: ERROR nova.scheduler.client.report [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] [req-fdce0066-cb06-47fb-81a0-18be2d0e897b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-fdce0066-cb06-47fb-81a0-18be2d0e897b"}]} [ 927.487601] env[69367]: DEBUG oslo_concurrency.lockutils [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.068s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.488710] env[69367]: ERROR nova.compute.manager [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] Traceback (most recent call last): [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] yield [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] self.set_inventory_for_provider( [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} 
On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-fdce0066-cb06-47fb-81a0-18be2d0e897b"}]} [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] During handling of the above exception, another exception occurred: [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] Traceback (most recent call last): [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] with self.rt.instance_claim(context, instance, node, allocs, [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] return f(*args, **kwargs) [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] self._update(elevated, cn) [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] self._update_to_placement(context, compute_node, startup) [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] return attempt.get(self._wrap_exception) [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] six.reraise(self.value[0], self.value[1], self.value[2]) [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] raise value [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 927.488710] env[69367]: 
ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] self.reportclient.update_from_provider_tree( [ 927.488710] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 927.489653] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] with catch_all(pd.uuid): [ 927.489653] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 927.489653] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] self.gen.throw(typ, value, traceback) [ 927.489653] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 927.489653] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] raise exception.ResourceProviderSyncFailed() [ 927.489653] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 927.489653] env[69367]: ERROR nova.compute.manager [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] [ 927.489653] env[69367]: DEBUG nova.compute.utils [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 927.492079] env[69367]: DEBUG oslo_concurrency.lockutils [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.245s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.494522] env[69367]: INFO nova.compute.claims [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 927.497376] env[69367]: DEBUG nova.compute.manager [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] Build of instance a1714871-0888-4957-9f26-18ddc4b73ecd was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 927.497746] env[69367]: DEBUG nova.compute.manager [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 927.497987] env[69367]: DEBUG oslo_concurrency.lockutils [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] Acquiring lock "refresh_cache-a1714871-0888-4957-9f26-18ddc4b73ecd" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.498194] env[69367]: DEBUG oslo_concurrency.lockutils [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] Acquired lock "refresh_cache-a1714871-0888-4957-9f26-18ddc4b73ecd" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 927.498323] env[69367]: DEBUG nova.network.neutron [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 927.608513] env[69367]: INFO nova.compute.manager [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: 882f1751-5f90-43f8-92d6-c174a6aad09b] Took 1.02 seconds to deallocate network for instance. [ 928.028681] env[69367]: DEBUG nova.network.neutron [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 928.182766] env[69367]: DEBUG nova.network.neutron [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.427386] env[69367]: DEBUG nova.compute.manager [req-1002ff78-49e6-4e51-9f33-684e3cc16f58 req-cac5f07a-92c4-44a4-af6e-46a450979b48 service nova] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Received event network-changed-1daae92e-1898-467c-be43-e8f27bff4242 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 928.427386] env[69367]: DEBUG nova.compute.manager [req-1002ff78-49e6-4e51-9f33-684e3cc16f58 req-cac5f07a-92c4-44a4-af6e-46a450979b48 service nova] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Refreshing instance network info cache due to event network-changed-1daae92e-1898-467c-be43-e8f27bff4242. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 928.427386] env[69367]: DEBUG oslo_concurrency.lockutils [req-1002ff78-49e6-4e51-9f33-684e3cc16f58 req-cac5f07a-92c4-44a4-af6e-46a450979b48 service nova] Acquiring lock "refresh_cache-42db60d9-e5f7-4925-8f6f-d3884687414a" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.427386] env[69367]: DEBUG oslo_concurrency.lockutils [req-1002ff78-49e6-4e51-9f33-684e3cc16f58 req-cac5f07a-92c4-44a4-af6e-46a450979b48 service nova] Acquired lock "refresh_cache-42db60d9-e5f7-4925-8f6f-d3884687414a" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 928.427386] env[69367]: DEBUG nova.network.neutron [req-1002ff78-49e6-4e51-9f33-684e3cc16f58 req-cac5f07a-92c4-44a4-af6e-46a450979b48 service nova] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Refreshing network info cache for port 1daae92e-1898-467c-be43-e8f27bff4242 {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 928.526777] env[69367]: DEBUG nova.scheduler.client.report [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 928.543907] env[69367]: DEBUG nova.scheduler.client.report [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 928.543907] env[69367]: DEBUG nova.compute.provider_tree [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 928.557388] env[69367]: DEBUG nova.scheduler.client.report [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 928.578809] env[69367]: DEBUG nova.scheduler.client.report [None req-661b4779-e44f-4925-a887-df3506a5c644 
tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 928.650601] env[69367]: INFO nova.scheduler.client.report [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Deleted allocations for instance 882f1751-5f90-43f8-92d6-c174a6aad09b [ 928.685695] env[69367]: DEBUG oslo_concurrency.lockutils [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] Releasing lock "refresh_cache-a1714871-0888-4957-9f26-18ddc4b73ecd" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 928.685960] env[69367]: DEBUG nova.compute.manager [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 928.686227] env[69367]: DEBUG nova.compute.manager [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 928.686511] env[69367]: DEBUG nova.network.neutron [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 928.713231] env[69367]: DEBUG nova.network.neutron [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 928.923264] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a6a6eb6c-a52f-4b85-9cc4-e1ae3d0ed55a tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "837b4093-308b-440b-940d-fc0227a5c590" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 928.923527] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a6a6eb6c-a52f-4b85-9cc4-e1ae3d0ed55a tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "837b4093-308b-440b-940d-fc0227a5c590" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 928.924305] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a6a6eb6c-a52f-4b85-9cc4-e1ae3d0ed55a tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "837b4093-308b-440b-940d-fc0227a5c590-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 928.924305] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a6a6eb6c-a52f-4b85-9cc4-e1ae3d0ed55a tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "837b4093-308b-440b-940d-fc0227a5c590-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 928.924305] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a6a6eb6c-a52f-4b85-9cc4-e1ae3d0ed55a tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "837b4093-308b-440b-940d-fc0227a5c590-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 928.926187] env[69367]: INFO nova.compute.manager [None req-a6a6eb6c-a52f-4b85-9cc4-e1ae3d0ed55a tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Terminating instance [ 928.992155] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e76e314-9454-431d-8466-c1efdba71377 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.002144] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c621284-8b06-4f57-a69c-138d2b6055a4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.041345] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac3f389c-a1c3-4c4f-9af9-22f29fee7d28 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.048959] env[69367]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d7615f-1389-48d6-97f1-f6164596ce72 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.065743] env[69367]: DEBUG nova.compute.provider_tree [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 929.162744] env[69367]: DEBUG oslo_concurrency.lockutils [None req-62cd3237-0339-403e-aaf2-eb3e6851ac2d tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "882f1751-5f90-43f8-92d6-c174a6aad09b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 7.574s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.218621] env[69367]: DEBUG nova.network.neutron [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.222661] env[69367]: DEBUG nova.network.neutron [req-1002ff78-49e6-4e51-9f33-684e3cc16f58 req-cac5f07a-92c4-44a4-af6e-46a450979b48 service nova] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Updated VIF entry in instance network info cache for port 1daae92e-1898-467c-be43-e8f27bff4242. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 929.223072] env[69367]: DEBUG nova.network.neutron [req-1002ff78-49e6-4e51-9f33-684e3cc16f58 req-cac5f07a-92c4-44a4-af6e-46a450979b48 service nova] [instance: 42db60d9-e5f7-4925-8f6f-d3884687414a] Updating instance_info_cache with network_info: [{"id": "1daae92e-1898-467c-be43-e8f27bff4242", "address": "fa:16:3e:b1:9b:f0", "network": {"id": "a8e5c1a6-2526-4042-8a8d-fdb54dbe7bf9", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1429177141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5bc3d470905412ea72a8eedb98e9e47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1daae92e-18", "ovs_interfaceid": "1daae92e-1898-467c-be43-e8f27bff4242", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.439407] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a6a6eb6c-a52f-4b85-9cc4-e1ae3d0ed55a tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "refresh_cache-837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.439546] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a6a6eb6c-a52f-4b85-9cc4-e1ae3d0ed55a tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquired lock "refresh_cache-837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 929.439723] env[69367]: DEBUG nova.network.neutron [None req-a6a6eb6c-a52f-4b85-9cc4-e1ae3d0ed55a tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 929.590738] env[69367]: ERROR nova.scheduler.client.report [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [req-3f81dc0a-1c2c-44df-9639-a3b38b19ac8a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-3f81dc0a-1c2c-44df-9639-a3b38b19ac8a"}]} [ 929.590738] env[69367]: DEBUG oslo_concurrency.lockutils [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.097s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.590738] env[69367]: ERROR nova.compute.manager [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 929.590738] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] Traceback (most recent call last): [ 929.590738] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 929.590738] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] yield [ 929.590738] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 929.590738] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] self.set_inventory_for_provider( [ 929.590738] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 929.590738] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 929.590738] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-3f81dc0a-1c2c-44df-9639-a3b38b19ac8a"}]} [ 929.590738] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] [ 929.590738] env[69367]: ERROR nova.compute.manager 
[instance: 46237e91-87ba-4b91-af8c-84d8dde87508] During handling of the above exception, another exception occurred: [ 929.590738] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] [ 929.590738] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] Traceback (most recent call last): [ 929.590738] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 929.590738] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] with self.rt.instance_claim(context, instance, node, allocs, [ 929.590738] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 929.590738] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] return f(*args, **kwargs) [ 929.590738] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 929.590738] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] self._update(elevated, cn) [ 929.590738] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 929.590738] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] self._update_to_placement(context, compute_node, startup) [ 929.590738] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 929.590738] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 929.591557] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 929.591557] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] return attempt.get(self._wrap_exception) [ 929.591557] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 929.591557] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] six.reraise(self.value[0], self.value[1], self.value[2]) [ 929.591557] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 929.591557] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] raise value [ 929.591557] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 929.591557] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 929.591557] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 
1390, in _update_to_placement [ 929.591557] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] self.reportclient.update_from_provider_tree( [ 929.591557] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 929.591557] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] with catch_all(pd.uuid): [ 929.591557] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 929.591557] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] self.gen.throw(typ, value, traceback) [ 929.591557] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 929.591557] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] raise exception.ResourceProviderSyncFailed() [ 929.591557] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 929.591557] env[69367]: ERROR nova.compute.manager [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] [ 929.591557] env[69367]: DEBUG nova.compute.utils [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 929.593143] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8546bb22-6407-4505-bd4b-0996daabbd6b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.103s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.593306] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8546bb22-6407-4505-bd4b-0996daabbd6b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.595328] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c1478397-0ed0-419b-94b5-970c8f06f7f3 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.318s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.595590] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c1478397-0ed0-419b-94b5-970c8f06f7f3 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.597671] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.291s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.599519] env[69367]: INFO nova.compute.claims [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 929.602542] env[69367]: DEBUG nova.compute.manager [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] Build of instance 46237e91-87ba-4b91-af8c-84d8dde87508 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 929.603728] env[69367]: DEBUG nova.compute.manager [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 929.603982] env[69367]: DEBUG oslo_concurrency.lockutils [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Acquiring lock "refresh_cache-46237e91-87ba-4b91-af8c-84d8dde87508" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.604159] env[69367]: DEBUG oslo_concurrency.lockutils [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Acquired lock "refresh_cache-46237e91-87ba-4b91-af8c-84d8dde87508" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 929.604327] env[69367]: DEBUG nova.network.neutron [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 929.620771] env[69367]: INFO nova.scheduler.client.report [None req-8546bb22-6407-4505-bd4b-0996daabbd6b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Deleted allocations for instance d2f8328d-fd05-4e63-9cbd-a6e3ec948964 [ 929.627162] env[69367]: INFO nova.scheduler.client.report [None req-c1478397-0ed0-419b-94b5-970c8f06f7f3 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Deleted allocations for instance 652e2e23-7927-46ce-b8af-fffdb6ac8a3e [ 929.720589] env[69367]: INFO nova.compute.manager [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] [instance: a1714871-0888-4957-9f26-18ddc4b73ecd] Took 1.03 seconds to deallocate network for instance. 
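Aside: the 400 response quoted above shows placement validating each inventory field against a JSON schema and DISK_GB's max_unit of 0 failing the "minimum: 1" constraint. The snippet below is a minimal, illustrative reproduction of that check using the jsonschema library and the schema fragment quoted in the error detail; it is not the actual placement code, and the inventory values are copied from the log purely for demonstration.

    # Illustrative only: re-run the max_unit check that rejected the
    # PUT /resource_providers/<uuid>/inventories payload logged above.
    import jsonschema

    MAX_UNIT_SCHEMA = {"type": "integer", "maximum": 2147483647, "minimum": 1}

    inventory = {
        "VCPU": {"total": 48, "max_unit": 16},
        "MEMORY_MB": {"total": 196590, "max_unit": 65530},
        "DISK_GB": {"total": 400, "max_unit": 0},   # rejected: 0 < minimum of 1
    }

    for rc, fields in inventory.items():
        try:
            jsonschema.validate(fields["max_unit"], MAX_UNIT_SCHEMA)
        except jsonschema.ValidationError as exc:
            print(f"{rc}: {exc.message}")   # DISK_GB: 0 is less than the minimum of 1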
[ 929.725225] env[69367]: DEBUG oslo_concurrency.lockutils [req-1002ff78-49e6-4e51-9f33-684e3cc16f58 req-cac5f07a-92c4-44a4-af6e-46a450979b48 service nova] Releasing lock "refresh_cache-42db60d9-e5f7-4925-8f6f-d3884687414a" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 929.753035] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 929.753517] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 929.818158] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] Acquiring lock "2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.818418] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] Lock "2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.973555] env[69367]: DEBUG nova.network.neutron [None req-a6a6eb6c-a52f-4b85-9cc4-e1ae3d0ed55a tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 930.065178] env[69367]: DEBUG nova.network.neutron [None req-a6a6eb6c-a52f-4b85-9cc4-e1ae3d0ed55a tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.128467] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8546bb22-6407-4505-bd4b-0996daabbd6b tempest-AttachVolumeNegativeTest-2041104029 tempest-AttachVolumeNegativeTest-2041104029-project-member] Lock "d2f8328d-fd05-4e63-9cbd-a6e3ec948964" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.876s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.135946] env[69367]: DEBUG oslo_concurrency.lockutils [None req-c1478397-0ed0-419b-94b5-970c8f06f7f3 tempest-ServersTestMultiNic-606427570 tempest-ServersTestMultiNic-606427570-project-member] Lock "652e2e23-7927-46ce-b8af-fffdb6ac8a3e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.221s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.142752] env[69367]: DEBUG nova.network.neutron [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 930.238959] env[69367]: DEBUG nova.network.neutron [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.268519] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 930.268519] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 930.268519] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 930.268519] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 930.268519] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_volume_usage 
{{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 930.268519] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 930.268519] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69367) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11183}} [ 930.268519] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 930.324111] env[69367]: DEBUG nova.compute.manager [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 930.572427] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a6a6eb6c-a52f-4b85-9cc4-e1ae3d0ed55a tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Releasing lock "refresh_cache-837b4093-308b-440b-940d-fc0227a5c590" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 930.572427] env[69367]: DEBUG nova.compute.manager [None req-a6a6eb6c-a52f-4b85-9cc4-e1ae3d0ed55a tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Start destroying the instance on the hypervisor. 
{{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 930.572427] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a6a6eb6c-a52f-4b85-9cc4-e1ae3d0ed55a tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 930.572427] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8cfd10d0-d31d-4975-b11b-c41afac8d6cc {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.583870] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415c9818-146f-47c8-a10b-ab7077b86a36 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.603632] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e99d75b2-5745-48af-ae94-7212b7d4d05c tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.604138] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e99d75b2-5745-48af-ae94-7212b7d4d05c tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.605797] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e99d75b2-5745-48af-ae94-7212b7d4d05c tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.606887] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e99d75b2-5745-48af-ae94-7212b7d4d05c tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.002s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.606887] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e99d75b2-5745-48af-ae94-7212b7d4d05c tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.610911] env[69367]: INFO nova.compute.manager [None req-e99d75b2-5745-48af-ae94-7212b7d4d05c tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Terminating instance [ 930.629240] env[69367]: WARNING nova.virt.vmwareapi.vmops [None req-a6a6eb6c-a52f-4b85-9cc4-e1ae3d0ed55a 
tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 837b4093-308b-440b-940d-fc0227a5c590 could not be found. [ 930.629847] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-a6a6eb6c-a52f-4b85-9cc4-e1ae3d0ed55a tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 930.630210] env[69367]: INFO nova.compute.manager [None req-a6a6eb6c-a52f-4b85-9cc4-e1ae3d0ed55a tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Took 0.06 seconds to destroy the instance on the hypervisor. [ 930.630620] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a6a6eb6c-a52f-4b85-9cc4-e1ae3d0ed55a tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 930.633019] env[69367]: DEBUG nova.compute.manager [-] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 930.633019] env[69367]: DEBUG nova.network.neutron [-] [instance: 837b4093-308b-440b-940d-fc0227a5c590] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 930.641856] env[69367]: DEBUG nova.scheduler.client.report [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 930.655874] env[69367]: DEBUG nova.network.neutron [-] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 930.669973] env[69367]: DEBUG nova.scheduler.client.report [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 930.670237] env[69367]: DEBUG nova.compute.provider_tree [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 930.695173] env[69367]: DEBUG nova.scheduler.client.report [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 930.720418] env[69367]: DEBUG nova.scheduler.client.report [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 930.741402] env[69367]: DEBUG oslo_concurrency.lockutils [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Releasing lock "refresh_cache-46237e91-87ba-4b91-af8c-84d8dde87508" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 930.741656] env[69367]: DEBUG nova.compute.manager [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 930.741841] env[69367]: DEBUG nova.compute.manager [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 930.742021] env[69367]: DEBUG nova.network.neutron [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 930.769736] env[69367]: INFO nova.scheduler.client.report [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] Deleted allocations for instance a1714871-0888-4957-9f26-18ddc4b73ecd [ 930.775742] env[69367]: DEBUG nova.network.neutron [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 930.777552] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.845128] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 931.083872] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21144be6-ed8f-4faf-aaa7-a0c9a154f23d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.092496] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11c8058d-df12-4aa0-8663-ff30f3143399 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.127690] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e99d75b2-5745-48af-ae94-7212b7d4d05c tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "refresh_cache-f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.128074] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e99d75b2-5745-48af-ae94-7212b7d4d05c tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquired lock "refresh_cache-f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 931.128252] env[69367]: DEBUG nova.network.neutron [None 
req-e99d75b2-5745-48af-ae94-7212b7d4d05c tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 931.130959] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbcfa79b-5180-433c-b8a2-aa3828ff3af4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.141183] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecc05f59-fda0-417e-a972-2fb753405b67 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.156892] env[69367]: DEBUG nova.compute.provider_tree [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 931.158404] env[69367]: DEBUG nova.network.neutron [-] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.282229] env[69367]: DEBUG nova.network.neutron [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.284009] env[69367]: DEBUG oslo_concurrency.lockutils [None req-df57d2c5-5562-4103-b0a0-49bd43abf9bb tempest-ServerGroupTestJSON-565755592 tempest-ServerGroupTestJSON-565755592-project-member] Lock "a1714871-0888-4957-9f26-18ddc4b73ecd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 9.108s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.651309] env[69367]: DEBUG nova.network.neutron [None req-e99d75b2-5745-48af-ae94-7212b7d4d05c tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 931.664685] env[69367]: INFO nova.compute.manager [-] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Took 1.03 seconds to deallocate network for instance. 
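Aside: the recurring "Acquiring lock ... / Lock ... acquired by ... waited / released ... held" DEBUG lines throughout this log are emitted by oslo.concurrency's lockutils wrapper. Below is a minimal sketch of the two usage patterns that produce such messages; the lock names and function bodies are placeholders, not Nova's actual code.

    # Sketch only: names and bodies below are illustrative placeholders.
    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def instance_claim():
        # Serialized section: one claim at a time per worker, which is
        # what the "waited N.NNNs" / "held N.NNNs" timings above measure.
        pass

    def refresh_cache(instance_uuid):
        # The same helper used as a context manager, mirroring the
        # "refresh_cache-<uuid>" lock names seen in the log.
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            pass

    instance_claim()
    refresh_cache("837b4093-308b-440b-940d-fc0227a5c590")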
[ 931.687917] env[69367]: ERROR nova.scheduler.client.report [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] [req-2cc92ea5-db9b-4046-ac24-b7fe17b66823] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2cc92ea5-db9b-4046-ac24-b7fe17b66823"}]} [ 931.688188] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.090s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.689605] env[69367]: ERROR nova.compute.manager [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] Traceback (most recent call last): [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] yield [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] self.set_inventory_for_provider( [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2cc92ea5-db9b-4046-ac24-b7fe17b66823"}]} [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] During handling of the above exception, another exception occurred: [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] Traceback (most recent call last): [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] with self.rt.instance_claim(context, instance, node, allocs, [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] return f(*args, **kwargs) [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] self._update(elevated, cn) [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 
64bf5848-e98b-47bb-a61f-7e9afce5bded] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] self._update_to_placement(context, compute_node, startup) [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] return attempt.get(self._wrap_exception) [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] six.reraise(self.value[0], self.value[1], self.value[2]) [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] raise value [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] self.reportclient.update_from_provider_tree( [ 931.689605] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 931.690612] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] with catch_all(pd.uuid): [ 931.690612] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 931.690612] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] self.gen.throw(typ, value, traceback) [ 931.690612] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 931.690612] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] raise exception.ResourceProviderSyncFailed() [ 931.690612] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
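Aside: the retrying.py frames in the traceback above come from the `retrying` library that wraps _update_to_placement; once its attempts are exhausted it re-raises the last exception (the six.reraise frame). The sketch below shows that decorator's behaviour in isolation; the parameters and exception type are made up for the example and are not Nova's settings.

    # Illustrative only: placeholder parameters and exception type.
    from retrying import retry

    class PlacementRejected(Exception):
        pass

    @retry(stop_max_attempt_number=3, wait_fixed=100,
           retry_on_exception=lambda exc: isinstance(exc, PlacementRejected))
    def update_to_placement():
        # Every attempt fails, so after 3 tries the decorator re-raises
        # the last PlacementRejected, as in the traceback above.
        raise PlacementRejected("inventory failed schema validation")

    try:
        update_to_placement()
    except PlacementRejected as exc:
        print("gave up:", exc)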
[ 931.690612] env[69367]: ERROR nova.compute.manager [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] [ 931.690612] env[69367]: DEBUG nova.compute.utils [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 931.690612] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.913s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.690852] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.690885] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69367) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 931.694296] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.846s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.694296] env[69367]: INFO nova.compute.claims [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 931.697183] env[69367]: DEBUG nova.compute.manager [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] Build of instance 64bf5848-e98b-47bb-a61f-7e9afce5bded was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 931.697700] env[69367]: DEBUG nova.compute.manager [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 931.697891] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] Acquiring lock "refresh_cache-64bf5848-e98b-47bb-a61f-7e9afce5bded" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.698056] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] Acquired lock "refresh_cache-64bf5848-e98b-47bb-a61f-7e9afce5bded" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 931.698216] env[69367]: DEBUG nova.network.neutron [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 931.699684] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28deffce-21d1-4ee8-8d87-29867056b8e5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.710832] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e39fda4-7956-4ff3-9afe-815cb860c8bb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.730955] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8fbdf23-196d-4755-8e7e-f9b808a57bd3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.740158] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-944546b5-9b90-4dbc-86ad-2f21a81c3d44 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.746224] env[69367]: DEBUG nova.network.neutron [None req-e99d75b2-5745-48af-ae94-7212b7d4d05c tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.775147] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180403MB free_disk=0GB free_vcpus=48 pci_devices=None {{(pid=69367) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 931.775299] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 931.784971] env[69367]: INFO nova.compute.manager [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: 46237e91-87ba-4b91-af8c-84d8dde87508] Took 1.04 seconds to deallocate network for instance. [ 932.176207] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a6a6eb6c-a52f-4b85-9cc4-e1ae3d0ed55a tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 932.231566] env[69367]: DEBUG nova.network.neutron [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 932.278021] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e99d75b2-5745-48af-ae94-7212b7d4d05c tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Releasing lock "refresh_cache-f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.278021] env[69367]: DEBUG nova.compute.manager [None req-e99d75b2-5745-48af-ae94-7212b7d4d05c tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Start destroying the instance on the hypervisor. {{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 932.278021] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-e99d75b2-5745-48af-ae94-7212b7d4d05c tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 932.278021] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5154306e-ea16-4268-9c51-adab251160fe {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.286724] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ffb9fc-3234-493c-87c3-632aeed86fa8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.327237] env[69367]: WARNING nova.virt.vmwareapi.vmops [None req-e99d75b2-5745-48af-ae94-7212b7d4d05c tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3 could not be found. 
[ 932.327237] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-e99d75b2-5745-48af-ae94-7212b7d4d05c tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 932.327237] env[69367]: INFO nova.compute.manager [None req-e99d75b2-5745-48af-ae94-7212b7d4d05c tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Took 0.05 seconds to destroy the instance on the hypervisor. [ 932.327237] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e99d75b2-5745-48af-ae94-7212b7d4d05c tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 932.327237] env[69367]: DEBUG nova.compute.manager [-] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 932.327237] env[69367]: DEBUG nova.network.neutron [-] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 932.359705] env[69367]: DEBUG nova.network.neutron [-] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 932.388459] env[69367]: DEBUG nova.network.neutron [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.746636] env[69367]: DEBUG nova.scheduler.client.report [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 932.773652] env[69367]: DEBUG nova.scheduler.client.report [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 932.773652] env[69367]: DEBUG nova.compute.provider_tree [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] Updating inventory in 
ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 932.787050] env[69367]: DEBUG nova.scheduler.client.report [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 932.807459] env[69367]: DEBUG nova.scheduler.client.report [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 932.838856] env[69367]: INFO nova.scheduler.client.report [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Deleted allocations for instance 46237e91-87ba-4b91-af8c-84d8dde87508 [ 932.860956] env[69367]: DEBUG nova.network.neutron [-] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.890441] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] Releasing lock "refresh_cache-64bf5848-e98b-47bb-a61f-7e9afce5bded" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.890524] env[69367]: DEBUG nova.compute.manager [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 932.890710] env[69367]: DEBUG nova.compute.manager [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 932.890869] env[69367]: DEBUG nova.network.neutron [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 932.908032] env[69367]: DEBUG nova.network.neutron [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 933.178872] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2219cda-27b0-4b34-b539-a75a89b0ea87 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.188635] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3232bd59-3892-45f9-9748-230ad1e3bab3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.231270] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb43ff20-d300-4679-a49c-f5a79d28549d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.240036] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1540f2f1-3437-43bf-addc-a6caa6d40389 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.255843] env[69367]: DEBUG nova.compute.provider_tree [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 933.347348] env[69367]: DEBUG oslo_concurrency.lockutils [None req-661b4779-e44f-4925-a887-df3506a5c644 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Lock "46237e91-87ba-4b91-af8c-84d8dde87508" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 10.135s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.364079] env[69367]: INFO nova.compute.manager 
[-] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Took 1.04 seconds to deallocate network for instance. [ 933.411260] env[69367]: DEBUG nova.network.neutron [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] [instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.783478] env[69367]: ERROR nova.scheduler.client.report [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] [req-8eb5510b-6959-48d8-8d5a-7d5769d94d21] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-8eb5510b-6959-48d8-8d5a-7d5769d94d21"}]} [ 933.783865] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.093s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.784517] env[69367]: ERROR nova.compute.manager [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] Traceback (most recent call last): [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] yield [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] self.set_inventory_for_provider( [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-8eb5510b-6959-48d8-8d5a-7d5769d94d21"}]} [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] During handling of the above exception, another exception occurred: [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] Traceback (most recent call last): [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] with self.rt.instance_claim(context, instance, node, allocs, [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] return f(*args, **kwargs) [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] self._update(elevated, cn) [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 
2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] self._update_to_placement(context, compute_node, startup) [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] return attempt.get(self._wrap_exception) [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] six.reraise(self.value[0], self.value[1], self.value[2]) [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] raise value [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] self.reportclient.update_from_provider_tree( [ 933.784517] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 933.785513] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] with catch_all(pd.uuid): [ 933.785513] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 933.785513] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] self.gen.throw(typ, value, traceback) [ 933.785513] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 933.785513] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] raise exception.ResourceProviderSyncFailed() [ 933.785513] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 933.785513] env[69367]: ERROR nova.compute.manager [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] [ 933.785513] env[69367]: DEBUG nova.compute.utils [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 933.786649] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.011s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.789726] env[69367]: DEBUG nova.compute.manager [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] Build of instance 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 933.789726] env[69367]: DEBUG nova.compute.manager [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 933.789726] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] Acquiring lock "refresh_cache-2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.789726] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] Acquired lock "refresh_cache-2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 933.789726] env[69367]: DEBUG nova.network.neutron [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 933.871986] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e99d75b2-5745-48af-ae94-7212b7d4d05c tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.914932] env[69367]: INFO nova.compute.manager [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] 
[instance: 64bf5848-e98b-47bb-a61f-7e9afce5bded] Took 1.02 seconds to deallocate network for instance. [ 934.336385] env[69367]: DEBUG nova.network.neutron [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 934.425283] env[69367]: DEBUG nova.network.neutron [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.769185] env[69367]: DEBUG oslo_concurrency.lockutils [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Acquiring lock "cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.769426] env[69367]: DEBUG oslo_concurrency.lockutils [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Lock "cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.930869] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] Releasing lock "refresh_cache-2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 934.931266] env[69367]: DEBUG nova.compute.manager [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 934.931618] env[69367]: DEBUG nova.compute.manager [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 934.931860] env[69367]: DEBUG nova.network.neutron [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 934.950273] env[69367]: DEBUG nova.network.neutron [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 934.952671] env[69367]: INFO nova.scheduler.client.report [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] Deleted allocations for instance 64bf5848-e98b-47bb-a61f-7e9afce5bded [ 935.272508] env[69367]: DEBUG nova.compute.manager [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 935.460119] env[69367]: DEBUG nova.network.neutron [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.461679] env[69367]: DEBUG oslo_concurrency.lockutils [None req-3de08db1-c559-428d-b2dc-e4c3733e154b tempest-ServerActionsTestJSON-1724285782 tempest-ServerActionsTestJSON-1724285782-project-member] Lock "64bf5848-e98b-47bb-a61f-7e9afce5bded" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 9.183s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.794784] env[69367]: DEBUG oslo_concurrency.lockutils [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.964664] env[69367]: INFO nova.compute.manager [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] [instance: 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f] Took 1.03 seconds to deallocate network for instance. 
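Both inventory payloads appear verbatim in this log: the one refreshed from placement carries 'DISK_GB': {..., 'max_unit': 1, ...}, while the one the compute host computes and tries to write back carries 'max_unit': 0. A small sketch (illustration only, not Nova code) that diffs the two DISK_GB records to isolate the field placement rejects:

# The two DISK_GB inventory records quoted in this log, differing only in max_unit.
refreshed_from_placement = {
    "total": 400, "reserved": 0, "min_unit": 1, "max_unit": 1,
    "step_size": 1, "allocation_ratio": 1.0,
}
computed_on_compute_host = {
    "total": 400, "reserved": 0, "min_unit": 1, "max_unit": 0,
    "step_size": 1, "allocation_ratio": 1.0,
}

diff = {
    key: (refreshed_from_placement[key], computed_on_compute_host[key])
    for key in refreshed_from_placement
    if refreshed_from_placement[key] != computed_on_compute_host[key]
}
print(diff)  # -> {'max_unit': (1, 0)}, the value the placement schema rejects ('minimum': 1)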
[ 936.356290] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance a358ce6d-9826-4ddb-8c2f-51bac8db59d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 936.356480] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 4e346ed1-36e9-421d-975f-e8bb6f05c0a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 936.356633] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance e1c7d100-4ad7-4871-970f-bb7562bfc6fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 936.356769] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 936.356893] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance ab365570-ac29-4094-be4c-d49563a465c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 936.357013] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance c17525ee-d038-4c81-932b-ed74a6de6cb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 936.357188] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 92c27615-d377-492f-a9db-ff45b2e71537 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 936.357237] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 936.357352] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 011ab7de-98a7-41fc-9e05-e71965c73c09 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 936.357466] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance f8c07fa1-d27c-4d0f-847b-481477cd04bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 936.357606] env[69367]: WARNING nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 837b4093-308b-440b-940d-fc0227a5c590 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 936.357722] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 7f937d89-684b-44f5-9f30-783aeafe99d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 936.357833] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance ab9d8e3e-65c5-4ac9-920f-3042b8cf2054 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 936.357942] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 557dc011-44a1-4240-9596-d055d57e176f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 936.358061] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance c272b0ae-6313-46ab-977c-6de255e77675 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 936.358172] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance d900df05-b65c-4a45-94d1-563afbf9c022 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 936.358495] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 05aae150-5d86-4210-ae7e-8c63e83cb907 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 936.358671] env[69367]: WARNING nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 936.358795] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 42db60d9-e5f7-4925-8f6f-d3884687414a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 936.358918] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 936.863155] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 936.863505] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Total usable vcpus: 48, total allocated vcpus: 19 {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 936.863665] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4160MB phys_disk=200GB used_disk=19GB total_vcpus=48 used_vcpus=19 pci_stats=[] {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 936.886633] env[69367]: DEBUG nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 936.903309] env[69367]: DEBUG nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 936.903736] env[69367]: DEBUG nova.compute.provider_tree [None 
req-91493d17-5409-4115-9ff4-ec062188f565 None None] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 936.915580] env[69367]: DEBUG nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 936.935778] env[69367]: DEBUG nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 936.995240] env[69367]: INFO nova.scheduler.client.report [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] Deleted allocations for instance 2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f [ 937.270855] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Acquiring lock "b213cabf-af04-4599-ba76-d759d00f4bfc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.271627] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Lock "b213cabf-af04-4599-ba76-d759d00f4bfc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.303145] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f93c877-1c09-405e-93b8-19b73d3f051f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.312035] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9b46ddb-e25d-4806-bcf3-30ac29fed423 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.344365] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd2d173-66ca-4a2f-892c-df473167f19c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.352861] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-080acab3-079d-4774-9d5e-9d45e51605ab {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.366454] env[69367]: DEBUG nova.compute.provider_tree [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 937.506426] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0efb197e-a720-4f03-ab76-c5acead3391d tempest-InstanceActionsV221TestJSON-982944583 tempest-InstanceActionsV221TestJSON-982944583-project-member] Lock "2d5d8a3c-1fd7-46a4-9d27-dbe4d7e5b10f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 7.688s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 937.740874] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Acquiring lock "b401b3bf-4df9-40b7-bb75-e54a8417f397" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.742017] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Lock "b401b3bf-4df9-40b7-bb75-e54a8417f397" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.776015] env[69367]: DEBUG nova.compute.manager [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 937.891452] env[69367]: ERROR nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] [req-33c7450c-964b-45b6-953f-45215c38f8b5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-33c7450c-964b-45b6-953f-45215c38f8b5"}]} [ 937.892467] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.105s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 937.892467] env[69367]: ERROR nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Error updating resources for node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28.: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 937.892467] env[69367]: ERROR nova.compute.manager Traceback (most recent call last): [ 937.892467] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 937.892467] env[69367]: ERROR nova.compute.manager yield [ 937.892467] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 937.892467] env[69367]: ERROR nova.compute.manager self.set_inventory_for_provider( [ 937.892467] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 937.892467] env[69367]: ERROR nova.compute.manager raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 937.892467] env[69367]: ERROR nova.compute.manager nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-33c7450c-964b-45b6-953f-45215c38f8b5"}]} [ 937.892467] env[69367]: ERROR nova.compute.manager [ 937.892467] env[69367]: ERROR nova.compute.manager During handling of the above exception, another exception occurred: [ 937.892467] env[69367]: ERROR nova.compute.manager [ 937.892467] env[69367]: ERROR nova.compute.manager Traceback (most recent call last): [ 937.892467] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 11219, in _update_available_resource_for_node [ 937.892467] env[69367]: ERROR nova.compute.manager self.rt.update_available_resource(context, nodename, [ 937.892467] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 965, in update_available_resource [ 
937.892467] env[69367]: ERROR nova.compute.manager self._update_available_resource(context, resources, startup=startup) [ 937.892467] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 937.892467] env[69367]: ERROR nova.compute.manager return f(*args, **kwargs) [ 937.892467] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1096, in _update_available_resource [ 937.892467] env[69367]: ERROR nova.compute.manager self._update(context, cn, startup=startup) [ 937.892467] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 937.892467] env[69367]: ERROR nova.compute.manager self._update_to_placement(context, compute_node, startup) [ 937.892467] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 937.892467] env[69367]: ERROR nova.compute.manager return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 937.892467] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 937.892467] env[69367]: ERROR nova.compute.manager return attempt.get(self._wrap_exception) [ 937.892467] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 937.892467] env[69367]: ERROR nova.compute.manager six.reraise(self.value[0], self.value[1], self.value[2]) [ 937.892467] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 937.892467] env[69367]: ERROR nova.compute.manager raise value [ 937.892467] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 937.892467] env[69367]: ERROR nova.compute.manager attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 937.892467] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 937.892467] env[69367]: ERROR nova.compute.manager self.reportclient.update_from_provider_tree( [ 937.892467] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 937.892467] env[69367]: ERROR nova.compute.manager with catch_all(pd.uuid): [ 937.892467] env[69367]: ERROR nova.compute.manager File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 937.892467] env[69367]: ERROR nova.compute.manager self.gen.throw(typ, value, traceback) [ 937.892467] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 937.892467] env[69367]: ERROR nova.compute.manager raise exception.ResourceProviderSyncFailed() [ 937.892467] env[69367]: ERROR nova.compute.manager nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 937.892467] env[69367]: ERROR nova.compute.manager [ 937.895423] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a6a6eb6c-a52f-4b85-9cc4-e1ae3d0ed55a tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.717s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.895423] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a6a6eb6c-a52f-4b85-9cc4-e1ae3d0ed55a tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 937.895423] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e99d75b2-5745-48af-ae94-7212b7d4d05c tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.023s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.895423] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e99d75b2-5745-48af-ae94-7212b7d4d05c tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 937.898428] env[69367]: DEBUG oslo_concurrency.lockutils [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.103s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.899987] env[69367]: INFO nova.compute.claims [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 937.926793] env[69367]: INFO nova.scheduler.client.report [None req-e99d75b2-5745-48af-ae94-7212b7d4d05c tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Deleted allocations for instance f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3 [ 937.928669] env[69367]: INFO nova.scheduler.client.report [None req-a6a6eb6c-a52f-4b85-9cc4-e1ae3d0ed55a tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Deleted allocations for instance 837b4093-308b-440b-940d-fc0227a5c590 [ 938.246899] env[69367]: DEBUG nova.compute.manager [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] Starting instance... 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 938.308612] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.443906] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e99d75b2-5745-48af-ae94-7212b7d4d05c tempest-ServersTestJSON-142381119 tempest-ServersTestJSON-142381119-project-member] Lock "f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.840s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.448162] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a6a6eb6c-a52f-4b85-9cc4-e1ae3d0ed55a tempest-AttachInterfacesTestJSON-2085020977 tempest-AttachInterfacesTestJSON-2085020977-project-member] Lock "837b4093-308b-440b-940d-fc0227a5c590" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.524s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.762657] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.934020] env[69367]: DEBUG nova.scheduler.client.report [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 938.952468] env[69367]: DEBUG nova.scheduler.client.report [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 938.952468] env[69367]: DEBUG nova.compute.provider_tree [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 938.965883] env[69367]: DEBUG nova.scheduler.client.report [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 938.990121] env[69367]: DEBUG nova.scheduler.client.report [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 939.256726] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8087fb39-5409-4e3e-9154-f084f73186fc {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.267052] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4266f7bd-9ddf-4f0e-ac13-d20112574d6c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.301690] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d37428dc-8362-47d0-8bb7-7aa44e634391 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.310263] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96f2ae76-0d07-4e29-8251-52b8fb9e3304 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.325527] env[69367]: DEBUG nova.compute.provider_tree [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 939.851842] env[69367]: ERROR nova.scheduler.client.report [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [req-87ea26a9-162c-4336-849d-19e32b8fe93e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-87ea26a9-162c-4336-849d-19e32b8fe93e"}]} [ 939.852253] env[69367]: DEBUG oslo_concurrency.lockutils [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.954s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.852861] env[69367]: ERROR nova.compute.manager [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] Traceback (most recent call last): [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] yield [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] self.set_inventory_for_provider( [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-87ea26a9-162c-4336-849d-19e32b8fe93e"}]} [ 
939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] During handling of the above exception, another exception occurred: [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] Traceback (most recent call last): [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] with self.rt.instance_claim(context, instance, node, allocs, [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] return f(*args, **kwargs) [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] self._update(elevated, cn) [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] self._update_to_placement(context, compute_node, startup) [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] return attempt.get(self._wrap_exception) [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] six.reraise(self.value[0], self.value[1], self.value[2]) [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] raise value [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 939.852861] 
env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] self.reportclient.update_from_provider_tree( [ 939.852861] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 939.853972] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] with catch_all(pd.uuid): [ 939.853972] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 939.853972] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] self.gen.throw(typ, value, traceback) [ 939.853972] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 939.853972] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] raise exception.ResourceProviderSyncFailed() [ 939.853972] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 939.853972] env[69367]: ERROR nova.compute.manager [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] [ 939.853972] env[69367]: DEBUG nova.compute.utils [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 939.854713] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.547s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.856134] env[69367]: INFO nova.compute.claims [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 939.858654] env[69367]: DEBUG nova.compute.manager [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] Build of instance cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 939.859057] env[69367]: DEBUG nova.compute.manager [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 939.859283] env[69367]: DEBUG oslo_concurrency.lockutils [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Acquiring lock "refresh_cache-cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.859432] env[69367]: DEBUG oslo_concurrency.lockutils [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Acquired lock "refresh_cache-cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 939.859606] env[69367]: DEBUG nova.network.neutron [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 940.386897] env[69367]: DEBUG nova.network.neutron [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 940.475405] env[69367]: DEBUG nova.network.neutron [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.887956] env[69367]: DEBUG nova.scheduler.client.report [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 940.902890] env[69367]: DEBUG nova.scheduler.client.report [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 940.903171] env[69367]: DEBUG nova.compute.provider_tree [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 940.914238] env[69367]: DEBUG nova.scheduler.client.report [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 940.933264] env[69367]: DEBUG nova.scheduler.client.report [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 940.977980] env[69367]: DEBUG oslo_concurrency.lockutils [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 
tempest-ServerDiskConfigTestJSON-2118303032-project-member] Releasing lock "refresh_cache-cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 940.978249] env[69367]: DEBUG nova.compute.manager [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 940.978437] env[69367]: DEBUG nova.compute.manager [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 940.978606] env[69367]: DEBUG nova.network.neutron [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 940.994377] env[69367]: DEBUG nova.network.neutron [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 941.054853] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bf35031f-ec3a-40b5-a66d-5f1415fccadb tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "557dc011-44a1-4240-9596-d055d57e176f" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 941.055114] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bf35031f-ec3a-40b5-a66d-5f1415fccadb tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "557dc011-44a1-4240-9596-d055d57e176f" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 941.160501] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64d7b533-845f-4220-8524-1aeefe0edc99 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.168510] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f5dd3ef-9154-462e-946a-7c087cb8e9cf {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.197587] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea3909b-630e-4165-b4f4-0d5e71df4237 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.205082] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ccc53686-6529-4233-95c1-beea7e3ca3e9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.218339] env[69367]: DEBUG nova.compute.provider_tree [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 941.499039] env[69367]: DEBUG nova.network.neutron [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.560945] env[69367]: INFO nova.compute.manager [None req-bf35031f-ec3a-40b5-a66d-5f1415fccadb tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Detaching volume 1f3c6529-40e9-4e54-90e5-8dea525edf25 [ 941.598188] env[69367]: INFO nova.virt.block_device [None req-bf35031f-ec3a-40b5-a66d-5f1415fccadb tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Attempting to driver detach volume 1f3c6529-40e9-4e54-90e5-8dea525edf25 from mountpoint /dev/sdb [ 941.598436] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf35031f-ec3a-40b5-a66d-5f1415fccadb tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Volume detach. 
Driver type: vmdk {{(pid=69367) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 941.598627] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf35031f-ec3a-40b5-a66d-5f1415fccadb tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837752', 'volume_id': '1f3c6529-40e9-4e54-90e5-8dea525edf25', 'name': 'volume-1f3c6529-40e9-4e54-90e5-8dea525edf25', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '557dc011-44a1-4240-9596-d055d57e176f', 'attached_at': '', 'detached_at': '', 'volume_id': '1f3c6529-40e9-4e54-90e5-8dea525edf25', 'serial': '1f3c6529-40e9-4e54-90e5-8dea525edf25'} {{(pid=69367) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 941.599583] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4a0f3f-53db-48ee-8def-6a1a79660db0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.622957] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92c8cd2-dbfe-499d-8463-0952fd7307c7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.630578] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8229eb3a-a1f5-4a38-adb8-ac6c06b325af {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.657627] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bacae2a-43dd-4774-bf68-1b3ad6ab984c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.676923] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf35031f-ec3a-40b5-a66d-5f1415fccadb tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] The volume has not been displaced from its original location: [localhost-esx-install-datastore (1)] volume-1f3c6529-40e9-4e54-90e5-8dea525edf25/volume-1f3c6529-40e9-4e54-90e5-8dea525edf25.vmdk. No consolidation needed. 
{{(pid=69367) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 941.682244] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf35031f-ec3a-40b5-a66d-5f1415fccadb tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Reconfiguring VM instance instance-00000037 to detach disk 2001 {{(pid=69367) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 941.682565] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88d218cd-f380-41e7-b8a5-02468ab5acb4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.703316] env[69367]: DEBUG oslo_vmware.api [None req-bf35031f-ec3a-40b5-a66d-5f1415fccadb tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 941.703316] env[69367]: value = "task-4234146" [ 941.703316] env[69367]: _type = "Task" [ 941.703316] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.718087] env[69367]: DEBUG oslo_vmware.api [None req-bf35031f-ec3a-40b5-a66d-5f1415fccadb tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234146, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.740554] env[69367]: ERROR nova.scheduler.client.report [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [req-29158266-1f1f-4e3c-a872-8b863913aa07] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-29158266-1f1f-4e3c-a872-8b863913aa07"}]} [ 941.740965] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.886s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 941.741586] env[69367]: ERROR nova.compute.manager [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] Traceback (most recent call last): [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] yield [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] self.set_inventory_for_provider( [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-29158266-1f1f-4e3c-a872-8b863913aa07"}]} [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] [ 941.741586] env[69367]: ERROR 
nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] During handling of the above exception, another exception occurred: [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] Traceback (most recent call last): [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] with self.rt.instance_claim(context, instance, node, allocs, [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] return f(*args, **kwargs) [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] self._update(elevated, cn) [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] self._update_to_placement(context, compute_node, startup) [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] return attempt.get(self._wrap_exception) [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] six.reraise(self.value[0], self.value[1], self.value[2]) [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] raise value [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] File 
"/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] self.reportclient.update_from_provider_tree( [ 941.741586] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 941.742661] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] with catch_all(pd.uuid): [ 941.742661] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 941.742661] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] self.gen.throw(typ, value, traceback) [ 941.742661] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 941.742661] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] raise exception.ResourceProviderSyncFailed() [ 941.742661] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 941.742661] env[69367]: ERROR nova.compute.manager [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] [ 941.742661] env[69367]: DEBUG nova.compute.utils [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 941.743644] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.981s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 941.745136] env[69367]: INFO nova.compute.claims [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 941.747690] env[69367]: DEBUG nova.compute.manager [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] Build of instance b213cabf-af04-4599-ba76-d759d00f4bfc was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 941.748122] env[69367]: DEBUG nova.compute.manager [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 941.748527] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Acquiring lock "refresh_cache-b213cabf-af04-4599-ba76-d759d00f4bfc" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.748649] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Acquired lock "refresh_cache-b213cabf-af04-4599-ba76-d759d00f4bfc" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 941.748807] env[69367]: DEBUG nova.network.neutron [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 942.001835] env[69367]: INFO nova.compute.manager [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] [instance: cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb] Took 1.02 seconds to deallocate network for instance. [ 942.213420] env[69367]: DEBUG oslo_vmware.api [None req-bf35031f-ec3a-40b5-a66d-5f1415fccadb tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234146, 'name': ReconfigVM_Task, 'duration_secs': 0.220239} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.213702] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf35031f-ec3a-40b5-a66d-5f1415fccadb tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Reconfigured VM instance instance-00000037 to detach disk 2001 {{(pid=69367) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 942.218281] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-452fa654-0a88-410e-b0a1-7d083d6b4049 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.234090] env[69367]: DEBUG oslo_vmware.api [None req-bf35031f-ec3a-40b5-a66d-5f1415fccadb tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 942.234090] env[69367]: value = "task-4234147" [ 942.234090] env[69367]: _type = "Task" [ 942.234090] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.242221] env[69367]: DEBUG oslo_vmware.api [None req-bf35031f-ec3a-40b5-a66d-5f1415fccadb tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234147, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.272634] env[69367]: DEBUG nova.network.neutron [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 942.359073] env[69367]: DEBUG nova.network.neutron [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.745158] env[69367]: DEBUG oslo_vmware.api [None req-bf35031f-ec3a-40b5-a66d-5f1415fccadb tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234147, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.775883] env[69367]: DEBUG nova.scheduler.client.report [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 942.791024] env[69367]: DEBUG nova.scheduler.client.report [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 942.791024] env[69367]: DEBUG nova.compute.provider_tree [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 942.802121] 
env[69367]: DEBUG nova.scheduler.client.report [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 942.821511] env[69367]: DEBUG nova.scheduler.client.report [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 942.864018] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Releasing lock "refresh_cache-b213cabf-af04-4599-ba76-d759d00f4bfc" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 942.864265] env[69367]: DEBUG nova.compute.manager [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 942.864457] env[69367]: DEBUG nova.compute.manager [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 942.864717] env[69367]: DEBUG nova.network.neutron [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 942.880476] env[69367]: DEBUG nova.network.neutron [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 943.029819] env[69367]: INFO nova.scheduler.client.report [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Deleted allocations for instance cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb [ 943.077997] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62db2d24-f480-4df6-8fe4-abdf60e9fa65 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.086600] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b37e56-f45c-4294-95aa-eb05d3129de6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.118502] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6441ce6a-6cc6-455f-bac2-e4c6cbf0f4a7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.126833] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e8fa34c-6c6c-413d-9f83-fc01aa0c60be {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.141507] env[69367]: DEBUG nova.compute.provider_tree [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 943.245468] env[69367]: DEBUG oslo_vmware.api [None req-bf35031f-ec3a-40b5-a66d-5f1415fccadb tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234147, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.384871] env[69367]: DEBUG nova.network.neutron [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.539861] env[69367]: DEBUG oslo_concurrency.lockutils [None req-62a9025e-4e99-4b4d-805c-b2d2b6fb4856 tempest-ServerDiskConfigTestJSON-2118303032 tempest-ServerDiskConfigTestJSON-2118303032-project-member] Lock "cf8cf8fe-86a0-47f4-b84d-ed9ed046b1fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 8.770s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.661980] env[69367]: ERROR nova.scheduler.client.report [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [req-9279b1fa-e82f-41f7-8e10-d362c2cdf076] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-9279b1fa-e82f-41f7-8e10-d362c2cdf076"}]} [ 943.663106] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.919s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.663345] env[69367]: ERROR nova.compute.manager [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
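Editor's note, before the traceback below repeats the same chain a third time: each instance_claim refreshes the provider's inventory from placement (which still holds DISK_GB max_unit 1), then the compute side appears to overwrite it with max_unit 0 and the PUT is rejected, so every build against provider 19ddf8be-7305-4f70-8366-52a9957232e6 is re-scheduled. The sketch below is purely illustrative, not the actual Nova/VMware driver code or its eventual fix; the function name and the assumption that max_unit is derived from the largest free space on a single datastore are the editor's.

# Hypothetical guard (illustration only): clamp a computed max_unit so the
# reported inventory stays schema-valid even when the backing datastore
# momentarily reports no usable free space.
def disk_gb_inventory(total_gb: int, largest_free_gb: int, reserved_gb: int = 0) -> dict:
    max_unit = max(1, largest_free_gb)  # placement requires max_unit >= 1
    return {
        "total": total_gb,
        "reserved": reserved_gb,
        "min_unit": 1,
        "max_unit": max_unit,
        "step_size": 1,
        "allocation_ratio": 1.0,
    }

# With the values seen in this log (total 400 GB, free space rounding down to 0):
print(disk_gb_inventory(400, 0))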
[ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] Traceback (most recent call last): [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] yield [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] self.set_inventory_for_provider( [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-9279b1fa-e82f-41f7-8e10-d362c2cdf076"}]} [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] During handling of the above exception, another exception occurred: [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] Traceback (most recent call last): [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] with self.rt.instance_claim(context, instance, node, allocs, [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] return f(*args, **kwargs) [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] self._update(elevated, cn) [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: 
b401b3bf-4df9-40b7-bb75-e54a8417f397] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] self._update_to_placement(context, compute_node, startup) [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] return attempt.get(self._wrap_exception) [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] six.reraise(self.value[0], self.value[1], self.value[2]) [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] raise value [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] self.reportclient.update_from_provider_tree( [ 943.663345] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 943.664249] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] with catch_all(pd.uuid): [ 943.664249] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 943.664249] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] self.gen.throw(typ, value, traceback) [ 943.664249] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 943.664249] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] raise exception.ResourceProviderSyncFailed() [ 943.664249] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 943.664249] env[69367]: ERROR nova.compute.manager [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] [ 943.664249] env[69367]: DEBUG nova.compute.utils [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 943.666077] env[69367]: DEBUG nova.compute.manager [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] Build of instance b401b3bf-4df9-40b7-bb75-e54a8417f397 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 943.666549] env[69367]: DEBUG nova.compute.manager [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] Unplugging VIFs for instance {{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 943.666829] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Acquiring lock "refresh_cache-b401b3bf-4df9-40b7-bb75-e54a8417f397" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.667029] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Acquired lock "refresh_cache-b401b3bf-4df9-40b7-bb75-e54a8417f397" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 943.667339] env[69367]: DEBUG nova.network.neutron [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 943.745961] env[69367]: DEBUG oslo_vmware.api [None req-bf35031f-ec3a-40b5-a66d-5f1415fccadb tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234147, 'name': ReconfigVM_Task, 'duration_secs': 1.150692} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.746356] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf35031f-ec3a-40b5-a66d-5f1415fccadb tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837752', 'volume_id': '1f3c6529-40e9-4e54-90e5-8dea525edf25', 'name': 'volume-1f3c6529-40e9-4e54-90e5-8dea525edf25', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '557dc011-44a1-4240-9596-d055d57e176f', 'attached_at': '', 'detached_at': '', 'volume_id': '1f3c6529-40e9-4e54-90e5-8dea525edf25', 'serial': '1f3c6529-40e9-4e54-90e5-8dea525edf25'} {{(pid=69367) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 943.887828] env[69367]: INFO nova.compute.manager [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b213cabf-af04-4599-ba76-d759d00f4bfc] Took 1.02 seconds to deallocate network for instance. [ 944.188812] env[69367]: DEBUG nova.network.neutron [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 944.261794] env[69367]: DEBUG nova.network.neutron [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.287497] env[69367]: DEBUG nova.objects.instance [None req-bf35031f-ec3a-40b5-a66d-5f1415fccadb tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lazy-loading 'flavor' on Instance uuid 557dc011-44a1-4240-9596-d055d57e176f {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 944.765880] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Releasing lock "refresh_cache-b401b3bf-4df9-40b7-bb75-e54a8417f397" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 944.766249] env[69367]: DEBUG nova.compute.manager [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69367) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 944.766378] env[69367]: DEBUG nova.compute.manager [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 944.766552] env[69367]: DEBUG nova.network.neutron [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 944.787620] env[69367]: DEBUG nova.network.neutron [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] Instance cache missing network info. {{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 944.921209] env[69367]: INFO nova.scheduler.client.report [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Deleted allocations for instance b213cabf-af04-4599-ba76-d759d00f4bfc [ 945.273393] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f9fff3e3-35d9-4a51-b2d4-410214cc9cd3 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "557dc011-44a1-4240-9596-d055d57e176f" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 945.290388] env[69367]: DEBUG nova.network.neutron [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.293797] env[69367]: DEBUG oslo_concurrency.lockutils [None req-bf35031f-ec3a-40b5-a66d-5f1415fccadb tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "557dc011-44a1-4240-9596-d055d57e176f" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.239s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.295056] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f9fff3e3-35d9-4a51-b2d4-410214cc9cd3 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "557dc011-44a1-4240-9596-d055d57e176f" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.022s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.295240] env[69367]: DEBUG nova.compute.manager [None req-f9fff3e3-35d9-4a51-b2d4-410214cc9cd3 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Checking state {{(pid=69367) _get_power_state 
/opt/stack/nova/nova/compute/manager.py:1797}} [ 945.296296] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7605176b-3d6e-4cdf-9e0f-126101312356 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.304049] env[69367]: DEBUG nova.compute.manager [None req-f9fff3e3-35d9-4a51-b2d4-410214cc9cd3 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69367) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 945.304816] env[69367]: DEBUG nova.objects.instance [None req-f9fff3e3-35d9-4a51-b2d4-410214cc9cd3 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lazy-loading 'flavor' on Instance uuid 557dc011-44a1-4240-9596-d055d57e176f {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 945.429669] env[69367]: DEBUG oslo_concurrency.lockutils [None req-8534761f-6aa5-4bed-b368-7732dd3653a0 tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Lock "b213cabf-af04-4599-ba76-d759d00f4bfc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 8.158s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.795918] env[69367]: INFO nova.compute.manager [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] [instance: b401b3bf-4df9-40b7-bb75-e54a8417f397] Took 1.03 seconds to deallocate network for instance. [ 946.312398] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9fff3e3-35d9-4a51-b2d4-410214cc9cd3 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 946.312729] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-71db547f-9d6d-470b-ac53-71c2eb7354c6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.320289] env[69367]: DEBUG oslo_vmware.api [None req-f9fff3e3-35d9-4a51-b2d4-410214cc9cd3 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 946.320289] env[69367]: value = "task-4234148" [ 946.320289] env[69367]: _type = "Task" [ 946.320289] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.329365] env[69367]: DEBUG oslo_vmware.api [None req-f9fff3e3-35d9-4a51-b2d4-410214cc9cd3 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234148, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.830848] env[69367]: DEBUG oslo_vmware.api [None req-f9fff3e3-35d9-4a51-b2d4-410214cc9cd3 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234148, 'name': PowerOffVM_Task, 'duration_secs': 0.175107} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.831813] env[69367]: INFO nova.scheduler.client.report [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Deleted allocations for instance b401b3bf-4df9-40b7-bb75-e54a8417f397 [ 946.837889] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9fff3e3-35d9-4a51-b2d4-410214cc9cd3 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 946.837889] env[69367]: DEBUG nova.compute.manager [None req-f9fff3e3-35d9-4a51-b2d4-410214cc9cd3 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 946.839242] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45e6b346-4b23-48fd-be60-6d79668fa2e4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.343337] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f0c7a18a-18a4-4870-96e7-c1f1a53505fa tempest-ServerRescueNegativeTestJSON-403967957 tempest-ServerRescueNegativeTestJSON-403967957-project-member] Lock "b401b3bf-4df9-40b7-bb75-e54a8417f397" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 9.602s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.351532] env[69367]: DEBUG oslo_concurrency.lockutils [None req-f9fff3e3-35d9-4a51-b2d4-410214cc9cd3 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "557dc011-44a1-4240-9596-d055d57e176f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.056s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.724099] env[69367]: DEBUG nova.objects.instance [None req-a30b9e50-8b57-4d75-be76-56ba83554549 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lazy-loading 'flavor' on Instance uuid 557dc011-44a1-4240-9596-d055d57e176f {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 948.228512] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a30b9e50-8b57-4d75-be76-56ba83554549 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "refresh_cache-557dc011-44a1-4240-9596-d055d57e176f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.228857] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a30b9e50-8b57-4d75-be76-56ba83554549 tempest-AttachVolumeTestJSON-513705930 
tempest-AttachVolumeTestJSON-513705930-project-member] Acquired lock "refresh_cache-557dc011-44a1-4240-9596-d055d57e176f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 948.229211] env[69367]: DEBUG nova.network.neutron [None req-a30b9e50-8b57-4d75-be76-56ba83554549 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 948.229211] env[69367]: DEBUG nova.objects.instance [None req-a30b9e50-8b57-4d75-be76-56ba83554549 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lazy-loading 'info_cache' on Instance uuid 557dc011-44a1-4240-9596-d055d57e176f {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 948.732498] env[69367]: DEBUG nova.objects.base [None req-a30b9e50-8b57-4d75-be76-56ba83554549 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Object Instance<557dc011-44a1-4240-9596-d055d57e176f> lazy-loaded attributes: flavor,info_cache {{(pid=69367) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 949.472346] env[69367]: DEBUG nova.network.neutron [None req-a30b9e50-8b57-4d75-be76-56ba83554549 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Updating instance_info_cache with network_info: [{"id": "cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1", "address": "fa:16:3e:ff:f7:e3", "network": {"id": "0bfdc337-bb57-4c33-9907-9098384ed460", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1159515822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd27807405a646e989b95325358a87eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb95db9d-92", "ovs_interfaceid": "cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.977547] env[69367]: DEBUG oslo_concurrency.lockutils [None req-a30b9e50-8b57-4d75-be76-56ba83554549 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Releasing lock "refresh_cache-557dc011-44a1-4240-9596-d055d57e176f" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 950.984033] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a30b9e50-8b57-4d75-be76-56ba83554549 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 
557dc011-44a1-4240-9596-d055d57e176f] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 950.984492] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a8dcaef4-ca4d-452f-9077-59c04d77881d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.992193] env[69367]: DEBUG oslo_vmware.api [None req-a30b9e50-8b57-4d75-be76-56ba83554549 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 950.992193] env[69367]: value = "task-4234149" [ 950.992193] env[69367]: _type = "Task" [ 950.992193] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.000168] env[69367]: DEBUG oslo_vmware.api [None req-a30b9e50-8b57-4d75-be76-56ba83554549 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234149, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.504211] env[69367]: DEBUG oslo_vmware.api [None req-a30b9e50-8b57-4d75-be76-56ba83554549 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234149, 'name': PowerOnVM_Task, 'duration_secs': 0.406565} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.504433] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-a30b9e50-8b57-4d75-be76-56ba83554549 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 951.504600] env[69367]: DEBUG nova.compute.manager [None req-a30b9e50-8b57-4d75-be76-56ba83554549 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 951.505381] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfb6f9c3-ab35-4550-8be2-57a834c859e5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.190064] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "557dc011-44a1-4240-9596-d055d57e176f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.190064] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "557dc011-44a1-4240-9596-d055d57e176f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.190480] env[69367]: DEBUG oslo_concurrency.lockutils [None 
req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "557dc011-44a1-4240-9596-d055d57e176f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.190586] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "557dc011-44a1-4240-9596-d055d57e176f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.190764] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "557dc011-44a1-4240-9596-d055d57e176f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.193834] env[69367]: INFO nova.compute.manager [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Terminating instance [ 988.697641] env[69367]: DEBUG nova.compute.manager [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Start destroying the instance on the hypervisor. {{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 988.697899] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 988.698888] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357fa737-0c1b-4a6a-95c6-62b7ba02fe3d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.707211] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 988.707467] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce3faae7-08a7-4bf9-b780-5c6241e2667f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.714900] env[69367]: DEBUG oslo_vmware.api [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 988.714900] env[69367]: value = "task-4234150" [ 988.714900] env[69367]: _type = "Task" [ 988.714900] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.724389] env[69367]: DEBUG oslo_vmware.api [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234150, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.224831] env[69367]: DEBUG oslo_vmware.api [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234150, 'name': PowerOffVM_Task, 'duration_secs': 0.192522} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.225959] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 989.225959] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 989.226128] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0ae5a8a5-6b4a-4d68-9ea3-fcbaf9c74352 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.289072] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 989.289334] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 989.289530] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Deleting the datastore file [datastore2] 557dc011-44a1-4240-9596-d055d57e176f {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 989.289818] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-635817f0-49f9-4c9d-98d7-84df8b86f7b3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.297070] env[69367]: DEBUG oslo_vmware.api [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 989.297070] env[69367]: value = "task-4234152" [ 989.297070] env[69367]: _type = "Task" [ 989.297070] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.305249] env[69367]: DEBUG oslo_vmware.api [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234152, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.807664] env[69367]: DEBUG oslo_vmware.api [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234152, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132845} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.807918] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 989.808125] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 989.808312] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 989.808492] env[69367]: INFO nova.compute.manager [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Took 1.11 seconds to destroy the instance on the hypervisor. [ 989.808732] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 989.808939] env[69367]: DEBUG nova.compute.manager [-] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 989.809069] env[69367]: DEBUG nova.network.neutron [-] [instance: 557dc011-44a1-4240-9596-d055d57e176f] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 990.293276] env[69367]: DEBUG nova.compute.manager [req-c027fd97-af24-4375-a66c-86184934e543 req-9d5b92c3-5e8e-4bd7-9130-9c8cc67fe0bd service nova] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Received event network-vif-deleted-cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1 {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 990.293708] env[69367]: INFO nova.compute.manager [req-c027fd97-af24-4375-a66c-86184934e543 req-9d5b92c3-5e8e-4bd7-9130-9c8cc67fe0bd service nova] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Neutron deleted interface cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1; detaching it from the instance and deleting it from the info cache [ 990.293781] env[69367]: DEBUG nova.network.neutron [req-c027fd97-af24-4375-a66c-86184934e543 req-9d5b92c3-5e8e-4bd7-9130-9c8cc67fe0bd service nova] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.771753] env[69367]: DEBUG nova.network.neutron [-] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.796740] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2a3b2ba2-54f3-4002-9f7c-bb401eb4666c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.807354] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-076b3db6-c856-45f8-906a-6bec266a3283 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.834475] env[69367]: DEBUG nova.compute.manager [req-c027fd97-af24-4375-a66c-86184934e543 req-9d5b92c3-5e8e-4bd7-9130-9c8cc67fe0bd service nova] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Detach interface failed, port_id=cb95db9d-9279-4fd5-bf3d-c4edcbfd26c1, reason: Instance 557dc011-44a1-4240-9596-d055d57e176f could not be found. {{(pid=69367) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11601}} [ 991.274652] env[69367]: INFO nova.compute.manager [-] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Took 1.47 seconds to deallocate network for instance. 
[ 991.782761] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 991.783177] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 991.783314] env[69367]: DEBUG nova.objects.instance [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lazy-loading 'resources' on Instance uuid 557dc011-44a1-4240-9596-d055d57e176f {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 992.303980] env[69367]: DEBUG nova.scheduler.client.report [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 992.318552] env[69367]: DEBUG nova.scheduler.client.report [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 992.318788] env[69367]: DEBUG nova.compute.provider_tree [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 992.329057] env[69367]: DEBUG nova.scheduler.client.report [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 992.345752] env[69367]: DEBUG nova.scheduler.client.report [None 
req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 992.545814] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af9d18b-08e1-41c3-9c58-098b9a97ae27 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.554874] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c6f7b2a-e818-4131-93d7-8c0aca450b82 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.586279] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-507154d6-1702-4c4b-89a7-edeee130601b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.594303] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad671b67-d8e1-4885-82a3-bf72e17a6977 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.607844] env[69367]: DEBUG nova.compute.provider_tree [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 993.111187] env[69367]: DEBUG nova.scheduler.client.report [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 993.616628] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.833s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.635169] env[69367]: INFO nova.scheduler.client.report [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Deleted allocations for instance 557dc011-44a1-4240-9596-d055d57e176f [ 994.143554] env[69367]: DEBUG oslo_concurrency.lockutils [None req-b9f4f522-b6d7-4f38-85cb-70558c9e2e0b tempest-AttachVolumeTestJSON-513705930 
tempest-AttachVolumeTestJSON-513705930-project-member] Lock "557dc011-44a1-4240-9596-d055d57e176f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.953s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.360650] env[69367]: DEBUG oslo_concurrency.lockutils [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.360982] env[69367]: DEBUG oslo_concurrency.lockutils [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.863824] env[69367]: DEBUG nova.compute.manager [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Starting instance... {{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 997.388868] env[69367]: DEBUG oslo_concurrency.lockutils [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.389177] env[69367]: DEBUG oslo_concurrency.lockutils [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.390765] env[69367]: INFO nova.compute.claims [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 997.904213] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 997.905093] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 997.905269] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69367) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 997.905420] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 997.905569] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 997.905711] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 997.905854] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 997.905989] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69367) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11183}} [ 997.906423] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 998.408432] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.579925] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b539c089-11bf-4b47-958b-dcdc091e222b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.587569] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a9c91e-bd9c-437e-97dd-a81a45842dce {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.616821] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef0e711a-d548-4163-9844-b18a17055884 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.624135] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01755716-d6c5-4fe5-b00e-4aabc4d3c713 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.637044] env[69367]: DEBUG nova.compute.provider_tree [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 999.140560] env[69367]: DEBUG nova.scheduler.client.report [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 999.646029] env[69367]: DEBUG oslo_concurrency.lockutils [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.257s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.646587] env[69367]: DEBUG nova.compute.manager [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Start building networks asynchronously for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 999.649270] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.241s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.649484] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.649613] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69367) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 999.650705] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b2e336d-2de1-49ac-9ce5-9b0ed1c9af7f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.659377] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91bcfc6-f892-42c5-b599-91c0e161d36a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.673533] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3613dc79-29cd-4f7f-a266-2c2bad346011 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.680212] env[69367]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc8f4c57-fbe8-4145-adbb-3b9e418237fa {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.710768] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180396MB free_disk=1GB free_vcpus=48 pci_devices=None {{(pid=69367) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 999.710930] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.711147] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.152362] env[69367]: DEBUG nova.compute.utils [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1000.153844] env[69367]: DEBUG nova.compute.manager [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Allocating IP information in the background. 
{{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1000.154064] env[69367]: DEBUG nova.network.neutron [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1000.203219] env[69367]: DEBUG nova.policy [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9c09f4a51f124383a4f6fdb69330416d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd27807405a646e989b95325358a87eb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 1000.453141] env[69367]: DEBUG nova.network.neutron [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Successfully created port: eaedef65-ee11-437f-981f-bde5612a3c8f {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1000.657338] env[69367]: DEBUG nova.compute.manager [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Start building block device mappings for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1000.739287] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance a358ce6d-9826-4ddb-8c2f-51bac8db59d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1000.739453] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 4e346ed1-36e9-421d-975f-e8bb6f05c0a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1000.739583] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance e1c7d100-4ad7-4871-970f-bb7562bfc6fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1000.739777] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1000.739915] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance ab365570-ac29-4094-be4c-d49563a465c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1000.740055] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance c17525ee-d038-4c81-932b-ed74a6de6cb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1000.740176] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 92c27615-d377-492f-a9db-ff45b2e71537 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1000.740309] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1000.740441] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 011ab7de-98a7-41fc-9e05-e71965c73c09 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1000.740555] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance f8c07fa1-d27c-4d0f-847b-481477cd04bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1000.740705] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 7f937d89-684b-44f5-9f30-783aeafe99d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1000.740853] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance ab9d8e3e-65c5-4ac9-920f-3042b8cf2054 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1000.741011] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance c272b0ae-6313-46ab-977c-6de255e77675 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1000.741155] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance d900df05-b65c-4a45-94d1-563afbf9c022 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1000.741302] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 05aae150-5d86-4210-ae7e-8c63e83cb907 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1000.741437] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 42db60d9-e5f7-4925-8f6f-d3884687414a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1000.741545] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1000.741757] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1000.741916] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3776MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1000.940569] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d3e123c-fdde-4941-ad00-92b4d35bb278 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.948659] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0658254e-5ecf-4766-8b20-a7505532a48f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.980778] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4afa1816-f9c9-4a12-89b9-104b26dd61fb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.988667] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cff5050e-259e-486f-8a32-1756302590b1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.002043] env[69367]: DEBUG nova.compute.provider_tree [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1001.504749] env[69367]: DEBUG nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1001.670604] env[69367]: DEBUG nova.compute.manager [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Start spawning the instance on the hypervisor. 
{{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1001.695912] env[69367]: DEBUG nova.virt.hardware [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1001.696181] env[69367]: DEBUG nova.virt.hardware [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1001.696339] env[69367]: DEBUG nova.virt.hardware [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1001.696524] env[69367]: DEBUG nova.virt.hardware [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1001.696674] env[69367]: DEBUG nova.virt.hardware [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1001.696825] env[69367]: DEBUG nova.virt.hardware [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1001.697042] env[69367]: DEBUG nova.virt.hardware [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1001.697206] env[69367]: DEBUG nova.virt.hardware [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1001.697374] env[69367]: DEBUG nova.virt.hardware [None 
req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1001.697539] env[69367]: DEBUG nova.virt.hardware [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1001.697712] env[69367]: DEBUG nova.virt.hardware [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1001.698589] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-961b9c1d-9e05-4551-b9bd-1903ab7e725d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.707183] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da9a009-91b5-4ee2-8996-20da97bc4ec6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.806885] env[69367]: DEBUG nova.compute.manager [req-4b0e53bc-db9c-46bf-ba9f-b90e607eace3 req-f04dc3dc-e197-40ce-80d7-df25493ff743 service nova] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Received event network-vif-plugged-eaedef65-ee11-437f-981f-bde5612a3c8f {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 1001.807123] env[69367]: DEBUG oslo_concurrency.lockutils [req-4b0e53bc-db9c-46bf-ba9f-b90e607eace3 req-f04dc3dc-e197-40ce-80d7-df25493ff743 service nova] Acquiring lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.807338] env[69367]: DEBUG oslo_concurrency.lockutils [req-4b0e53bc-db9c-46bf-ba9f-b90e607eace3 req-f04dc3dc-e197-40ce-80d7-df25493ff743 service nova] Lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1001.807590] env[69367]: DEBUG oslo_concurrency.lockutils [req-4b0e53bc-db9c-46bf-ba9f-b90e607eace3 req-f04dc3dc-e197-40ce-80d7-df25493ff743 service nova] Lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1001.807701] env[69367]: DEBUG nova.compute.manager [req-4b0e53bc-db9c-46bf-ba9f-b90e607eace3 req-f04dc3dc-e197-40ce-80d7-df25493ff743 service nova] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] No waiting events found dispatching network-vif-plugged-eaedef65-ee11-437f-981f-bde5612a3c8f {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1001.807820] env[69367]: WARNING nova.compute.manager [req-4b0e53bc-db9c-46bf-ba9f-b90e607eace3 
req-f04dc3dc-e197-40ce-80d7-df25493ff743 service nova] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Received unexpected event network-vif-plugged-eaedef65-ee11-437f-981f-bde5612a3c8f for instance with vm_state building and task_state spawning. [ 1001.894523] env[69367]: DEBUG nova.network.neutron [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Successfully updated port: eaedef65-ee11-437f-981f-bde5612a3c8f {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1002.009614] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69367) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1002.009907] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.299s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.397601] env[69367]: DEBUG oslo_concurrency.lockutils [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "refresh_cache-5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.397789] env[69367]: DEBUG oslo_concurrency.lockutils [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquired lock "refresh_cache-5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1002.398040] env[69367]: DEBUG nova.network.neutron [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1002.930122] env[69367]: DEBUG nova.network.neutron [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1003.059873] env[69367]: DEBUG nova.network.neutron [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Updating instance_info_cache with network_info: [{"id": "eaedef65-ee11-437f-981f-bde5612a3c8f", "address": "fa:16:3e:7e:74:f5", "network": {"id": "0bfdc337-bb57-4c33-9907-9098384ed460", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1159515822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd27807405a646e989b95325358a87eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaedef65-ee", "ovs_interfaceid": "eaedef65-ee11-437f-981f-bde5612a3c8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.562660] env[69367]: DEBUG oslo_concurrency.lockutils [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Releasing lock "refresh_cache-5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1003.562975] env[69367]: DEBUG nova.compute.manager [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Instance network_info: |[{"id": "eaedef65-ee11-437f-981f-bde5612a3c8f", "address": "fa:16:3e:7e:74:f5", "network": {"id": "0bfdc337-bb57-4c33-9907-9098384ed460", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1159515822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd27807405a646e989b95325358a87eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaedef65-ee", "ovs_interfaceid": "eaedef65-ee11-437f-981f-bde5612a3c8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1003.563521] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:74:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9bb629cd-6d0f-4bed-965c-bd04a2f3ec49', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eaedef65-ee11-437f-981f-bde5612a3c8f', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1003.571116] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1003.571429] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1003.571569] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dc234c48-a45b-49f6-aa61-710364756dae {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.595070] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1003.595070] env[69367]: value = "task-4234153" [ 1003.595070] env[69367]: _type = "Task" [ 1003.595070] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.603464] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234153, 'name': CreateVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.834404] env[69367]: DEBUG nova.compute.manager [req-f3502f6c-ff1b-47ca-b997-2154d601c681 req-bffabc10-f4a2-442a-82b5-6885f46cbc9b service nova] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Received event network-changed-eaedef65-ee11-437f-981f-bde5612a3c8f {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 1003.834559] env[69367]: DEBUG nova.compute.manager [req-f3502f6c-ff1b-47ca-b997-2154d601c681 req-bffabc10-f4a2-442a-82b5-6885f46cbc9b service nova] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Refreshing instance network info cache due to event network-changed-eaedef65-ee11-437f-981f-bde5612a3c8f. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 1003.834847] env[69367]: DEBUG oslo_concurrency.lockutils [req-f3502f6c-ff1b-47ca-b997-2154d601c681 req-bffabc10-f4a2-442a-82b5-6885f46cbc9b service nova] Acquiring lock "refresh_cache-5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.834998] env[69367]: DEBUG oslo_concurrency.lockutils [req-f3502f6c-ff1b-47ca-b997-2154d601c681 req-bffabc10-f4a2-442a-82b5-6885f46cbc9b service nova] Acquired lock "refresh_cache-5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1003.835181] env[69367]: DEBUG nova.network.neutron [req-f3502f6c-ff1b-47ca-b997-2154d601c681 req-bffabc10-f4a2-442a-82b5-6885f46cbc9b service nova] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Refreshing network info cache for port eaedef65-ee11-437f-981f-bde5612a3c8f {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1004.107085] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234153, 'name': CreateVM_Task, 'duration_secs': 0.414678} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.107471] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1004.107964] env[69367]: DEBUG oslo_concurrency.lockutils [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.108160] env[69367]: DEBUG oslo_concurrency.lockutils [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1004.108515] env[69367]: DEBUG oslo_concurrency.lockutils [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1004.108771] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec9b8904-257b-4b9e-8fc3-aa9f820ff43a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.114126] env[69367]: DEBUG oslo_vmware.api [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 1004.114126] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52d131b8-6fc4-d2ad-b091-ab70267a633e" [ 1004.114126] env[69367]: _type = "Task" [ 1004.114126] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.122761] env[69367]: DEBUG oslo_vmware.api [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52d131b8-6fc4-d2ad-b091-ab70267a633e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.523026] env[69367]: DEBUG nova.network.neutron [req-f3502f6c-ff1b-47ca-b997-2154d601c681 req-bffabc10-f4a2-442a-82b5-6885f46cbc9b service nova] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Updated VIF entry in instance network info cache for port eaedef65-ee11-437f-981f-bde5612a3c8f. {{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1004.523414] env[69367]: DEBUG nova.network.neutron [req-f3502f6c-ff1b-47ca-b997-2154d601c681 req-bffabc10-f4a2-442a-82b5-6885f46cbc9b service nova] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Updating instance_info_cache with network_info: [{"id": "eaedef65-ee11-437f-981f-bde5612a3c8f", "address": "fa:16:3e:7e:74:f5", "network": {"id": "0bfdc337-bb57-4c33-9907-9098384ed460", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1159515822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd27807405a646e989b95325358a87eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaedef65-ee", "ovs_interfaceid": "eaedef65-ee11-437f-981f-bde5612a3c8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.624842] env[69367]: DEBUG oslo_vmware.api [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52d131b8-6fc4-d2ad-b091-ab70267a633e, 'name': SearchDatastore_Task, 'duration_secs': 0.041201} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.625092] env[69367]: DEBUG oslo_concurrency.lockutils [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1004.625352] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1004.625626] env[69367]: DEBUG oslo_concurrency.lockutils [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.625780] env[69367]: DEBUG oslo_concurrency.lockutils [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1004.625967] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1004.626235] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-496ae77d-de16-4303-9bb4-387286f3aa5b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.634731] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1004.634902] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1004.635610] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bee9210a-b991-434f-85cf-51a06a894512 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.640753] env[69367]: DEBUG oslo_vmware.api [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 1004.640753] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52a181d3-3a7b-927e-b505-eff6fba0e625" [ 1004.640753] env[69367]: _type = "Task" [ 1004.640753] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.647993] env[69367]: DEBUG oslo_vmware.api [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52a181d3-3a7b-927e-b505-eff6fba0e625, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.026475] env[69367]: DEBUG oslo_concurrency.lockutils [req-f3502f6c-ff1b-47ca-b997-2154d601c681 req-bffabc10-f4a2-442a-82b5-6885f46cbc9b service nova] Releasing lock "refresh_cache-5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1005.151314] env[69367]: DEBUG oslo_vmware.api [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52a181d3-3a7b-927e-b505-eff6fba0e625, 'name': SearchDatastore_Task, 'duration_secs': 0.008346} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.152115] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a64a062e-7a23-4d34-aed6-a183f5dc1830 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.157206] env[69367]: DEBUG oslo_vmware.api [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 1005.157206] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]5259b32e-27d9-edbf-6075-03bcf02ad401" [ 1005.157206] env[69367]: _type = "Task" [ 1005.157206] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.164857] env[69367]: DEBUG oslo_vmware.api [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5259b32e-27d9-edbf-6075-03bcf02ad401, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.669474] env[69367]: DEBUG oslo_vmware.api [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5259b32e-27d9-edbf-6075-03bcf02ad401, 'name': SearchDatastore_Task, 'duration_secs': 0.009687} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.669827] env[69367]: DEBUG oslo_concurrency.lockutils [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1005.670107] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e/5108cc8a-2c5f-4f1a-b114-d932d0f3e11e.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1005.670428] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-63320e9b-efe3-49c1-b8e6-cccdc78f89e6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.677798] env[69367]: DEBUG oslo_vmware.api [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 1005.677798] env[69367]: value = "task-4234154" [ 1005.677798] env[69367]: _type = "Task" [ 1005.677798] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.685927] env[69367]: DEBUG oslo_vmware.api [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234154, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.188532] env[69367]: DEBUG oslo_vmware.api [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234154, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475378} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.189032] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e/5108cc8a-2c5f-4f1a-b114-d932d0f3e11e.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1006.189200] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1006.189376] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c53ab116-7f4b-4e9c-8394-9ecf467ec78e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.197212] env[69367]: DEBUG oslo_vmware.api [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 1006.197212] env[69367]: value = "task-4234155" [ 1006.197212] env[69367]: _type = "Task" [ 1006.197212] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.206786] env[69367]: DEBUG oslo_vmware.api [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234155, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.708787] env[69367]: DEBUG oslo_vmware.api [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234155, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06218} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.709088] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1006.709907] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e834f0cf-38a7-439c-a311-940c9bcd1f22 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.732178] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e/5108cc8a-2c5f-4f1a-b114-d932d0f3e11e.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1006.732496] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3127d74a-a4db-49be-b057-d568e187a1b7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.752474] env[69367]: DEBUG oslo_vmware.api [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 1006.752474] env[69367]: value = "task-4234156" [ 1006.752474] env[69367]: _type = "Task" [ 1006.752474] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.760586] env[69367]: DEBUG oslo_vmware.api [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234156, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.262178] env[69367]: DEBUG oslo_vmware.api [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234156, 'name': ReconfigVM_Task, 'duration_secs': 0.278116} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.262564] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Reconfigured VM instance instance-0000006d to attach disk [datastore2] 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e/5108cc8a-2c5f-4f1a-b114-d932d0f3e11e.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1007.263105] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9362d59a-ab90-4502-8491-199f49bf6936 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.269435] env[69367]: DEBUG oslo_vmware.api [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 1007.269435] env[69367]: value = "task-4234157" [ 1007.269435] env[69367]: _type = "Task" [ 1007.269435] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.277013] env[69367]: DEBUG oslo_vmware.api [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234157, 'name': Rename_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.778782] env[69367]: DEBUG oslo_vmware.api [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234157, 'name': Rename_Task, 'duration_secs': 0.139773} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.779032] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1007.779279] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-587a965c-87bb-4718-93ec-28ae5b1885d7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.785490] env[69367]: DEBUG oslo_vmware.api [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 1007.785490] env[69367]: value = "task-4234158" [ 1007.785490] env[69367]: _type = "Task" [ 1007.785490] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.793135] env[69367]: DEBUG oslo_vmware.api [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234158, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.295336] env[69367]: DEBUG oslo_vmware.api [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234158, 'name': PowerOnVM_Task, 'duration_secs': 0.44923} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.295705] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1008.295791] env[69367]: INFO nova.compute.manager [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Took 6.62 seconds to spawn the instance on the hypervisor. [ 1008.295969] env[69367]: DEBUG nova.compute.manager [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1008.296744] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-053d290f-402f-4c46-a840-2061a77ff52d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.816515] env[69367]: INFO nova.compute.manager [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Took 11.45 seconds to build instance. [ 1009.319030] env[69367]: DEBUG oslo_concurrency.lockutils [None req-11205522-be53-454a-9480-ed1ece6e2c5f tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.958s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1009.382073] env[69367]: DEBUG nova.compute.manager [req-1366cde1-604e-4ad3-937f-e19f60212bd9 req-30970bed-129e-499b-b3f7-ae6d77dfaa8e service nova] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Received event network-changed-eaedef65-ee11-437f-981f-bde5612a3c8f {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 1009.382357] env[69367]: DEBUG nova.compute.manager [req-1366cde1-604e-4ad3-937f-e19f60212bd9 req-30970bed-129e-499b-b3f7-ae6d77dfaa8e service nova] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Refreshing instance network info cache due to event network-changed-eaedef65-ee11-437f-981f-bde5612a3c8f. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 1009.382541] env[69367]: DEBUG oslo_concurrency.lockutils [req-1366cde1-604e-4ad3-937f-e19f60212bd9 req-30970bed-129e-499b-b3f7-ae6d77dfaa8e service nova] Acquiring lock "refresh_cache-5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.382652] env[69367]: DEBUG oslo_concurrency.lockutils [req-1366cde1-604e-4ad3-937f-e19f60212bd9 req-30970bed-129e-499b-b3f7-ae6d77dfaa8e service nova] Acquired lock "refresh_cache-5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1009.382816] env[69367]: DEBUG nova.network.neutron [req-1366cde1-604e-4ad3-937f-e19f60212bd9 req-30970bed-129e-499b-b3f7-ae6d77dfaa8e service nova] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Refreshing network info cache for port eaedef65-ee11-437f-981f-bde5612a3c8f {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1010.090944] env[69367]: DEBUG nova.network.neutron [req-1366cde1-604e-4ad3-937f-e19f60212bd9 req-30970bed-129e-499b-b3f7-ae6d77dfaa8e service nova] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Updated VIF entry in instance network info cache for port eaedef65-ee11-437f-981f-bde5612a3c8f. {{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1010.091407] env[69367]: DEBUG nova.network.neutron [req-1366cde1-604e-4ad3-937f-e19f60212bd9 req-30970bed-129e-499b-b3f7-ae6d77dfaa8e service nova] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Updating instance_info_cache with network_info: [{"id": "eaedef65-ee11-437f-981f-bde5612a3c8f", "address": "fa:16:3e:7e:74:f5", "network": {"id": "0bfdc337-bb57-4c33-9907-9098384ed460", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1159515822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd27807405a646e989b95325358a87eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaedef65-ee", "ovs_interfaceid": "eaedef65-ee11-437f-981f-bde5612a3c8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.594317] env[69367]: DEBUG oslo_concurrency.lockutils [req-1366cde1-604e-4ad3-937f-e19f60212bd9 req-30970bed-129e-499b-b3f7-ae6d77dfaa8e service nova] Releasing lock "refresh_cache-5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1035.188527] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task 
ComputeManager._check_instance_build_time {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1035.188915] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1035.697160] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1035.697382] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1035.697423] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1035.697574] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1035.697728] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1035.697876] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1035.698022] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69367) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11183}} [ 1035.698174] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1036.201716] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1036.202195] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.202414] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1036.202638] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69367) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1036.203590] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e944025-9807-48f2-b971-4af475c01dc1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.212230] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb7cb14-fdb2-42a4-b08e-7a9b5f1786cb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.226573] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0d39e5d-617b-4b91-9746-1b0ebf97cbd7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.233166] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b050b7d0-db50-4d49-a4d4-1bac45b42cbd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.261277] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180396MB free_disk=1GB free_vcpus=48 pci_devices=None {{(pid=69367) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1036.261438] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
1036.261640] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1037.295946] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance a358ce6d-9826-4ddb-8c2f-51bac8db59d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1037.296293] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 4e346ed1-36e9-421d-975f-e8bb6f05c0a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1037.296293] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance e1c7d100-4ad7-4871-970f-bb7562bfc6fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1037.296359] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1037.296478] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance ab365570-ac29-4094-be4c-d49563a465c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1037.296600] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance c17525ee-d038-4c81-932b-ed74a6de6cb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1037.296709] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 92c27615-d377-492f-a9db-ff45b2e71537 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1037.296821] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1037.296932] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 011ab7de-98a7-41fc-9e05-e71965c73c09 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1037.297052] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance f8c07fa1-d27c-4d0f-847b-481477cd04bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1037.297166] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 7f937d89-684b-44f5-9f30-783aeafe99d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1037.297275] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance ab9d8e3e-65c5-4ac9-920f-3042b8cf2054 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1037.297383] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance c272b0ae-6313-46ab-977c-6de255e77675 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1037.297493] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance d900df05-b65c-4a45-94d1-563afbf9c022 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1037.297603] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 05aae150-5d86-4210-ae7e-8c63e83cb907 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1037.297709] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 42db60d9-e5f7-4925-8f6f-d3884687414a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1037.297816] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1037.298052] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1037.298196] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3776MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1037.491078] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ec130e-66fa-461b-ab62-f4c1f8c834e5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.498906] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f99a972b-8364-4ea7-b397-dbcad16202ce {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.527481] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6977c018-f179-4bce-be7f-2288f0da0ce7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.535061] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c60aec2b-69d9-49f2-9946-b97aad377a88 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.547811] env[69367]: DEBUG nova.compute.provider_tree [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1038.051666] env[69367]: DEBUG nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1038.557659] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69367) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1038.558045] 
env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.296s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.944932] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5c568764-0f16-43e5-8814-2476dee6498c tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.944932] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5c568764-0f16-43e5-8814-2476dee6498c tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.450063] env[69367]: DEBUG nova.compute.utils [None req-5c568764-0f16-43e5-8814-2476dee6498c tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1048.953362] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5c568764-0f16-43e5-8814-2476dee6498c tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.006628] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5c568764-0f16-43e5-8814-2476dee6498c tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.006987] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5c568764-0f16-43e5-8814-2476dee6498c tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.007149] env[69367]: INFO nova.compute.manager [None req-5c568764-0f16-43e5-8814-2476dee6498c tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Attaching volume 6276d4d3-d6a6-45b6-8615-af919c7d7278 to /dev/sdb [ 1050.036854] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84d6aac-702e-447b-89f5-fb0b1dcd9fca {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.044561] env[69367]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99cf1bbc-ca79-4653-8da8-5528743e7088 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.057958] env[69367]: DEBUG nova.virt.block_device [None req-5c568764-0f16-43e5-8814-2476dee6498c tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Updating existing volume attachment record: 1b842bd3-8c97-4482-aa80-f9dbea9a3fc3 {{(pid=69367) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1054.602125] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c568764-0f16-43e5-8814-2476dee6498c tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Volume attach. Driver type: vmdk {{(pid=69367) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1054.602402] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c568764-0f16-43e5-8814-2476dee6498c tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837759', 'volume_id': '6276d4d3-d6a6-45b6-8615-af919c7d7278', 'name': 'volume-6276d4d3-d6a6-45b6-8615-af919c7d7278', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5108cc8a-2c5f-4f1a-b114-d932d0f3e11e', 'attached_at': '', 'detached_at': '', 'volume_id': '6276d4d3-d6a6-45b6-8615-af919c7d7278', 'serial': '6276d4d3-d6a6-45b6-8615-af919c7d7278'} {{(pid=69367) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1054.603297] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa9af419-0a2c-4e6b-8482-70fe50ebe7b6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.619677] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-640ccda2-0379-4702-8c51-6a1d6a746063 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.644092] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c568764-0f16-43e5-8814-2476dee6498c tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] volume-6276d4d3-d6a6-45b6-8615-af919c7d7278/volume-6276d4d3-d6a6-45b6-8615-af919c7d7278.vmdk or device None with type thin {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1054.644380] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b263454d-8fa4-4469-881f-4f9a58bc39d1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.662018] env[69367]: DEBUG oslo_vmware.api [None req-5c568764-0f16-43e5-8814-2476dee6498c tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 1054.662018] env[69367]: value = "task-4234161" [ 1054.662018] env[69367]: _type = "Task" [ 1054.662018] env[69367]: } to 
complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.669813] env[69367]: DEBUG oslo_vmware.api [None req-5c568764-0f16-43e5-8814-2476dee6498c tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234161, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.174263] env[69367]: DEBUG oslo_vmware.api [None req-5c568764-0f16-43e5-8814-2476dee6498c tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234161, 'name': ReconfigVM_Task, 'duration_secs': 0.33483} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.174542] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c568764-0f16-43e5-8814-2476dee6498c tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Reconfigured VM instance instance-0000006d to attach disk [datastore2] volume-6276d4d3-d6a6-45b6-8615-af919c7d7278/volume-6276d4d3-d6a6-45b6-8615-af919c7d7278.vmdk or device None with type thin {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1055.179095] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fdc65fbf-71d2-4b71-9049-2d8222c85536 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.193676] env[69367]: DEBUG oslo_vmware.api [None req-5c568764-0f16-43e5-8814-2476dee6498c tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 1055.193676] env[69367]: value = "task-4234162" [ 1055.193676] env[69367]: _type = "Task" [ 1055.193676] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.201620] env[69367]: DEBUG oslo_vmware.api [None req-5c568764-0f16-43e5-8814-2476dee6498c tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234162, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.704387] env[69367]: DEBUG oslo_vmware.api [None req-5c568764-0f16-43e5-8814-2476dee6498c tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234162, 'name': ReconfigVM_Task, 'duration_secs': 0.157081} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.704874] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c568764-0f16-43e5-8814-2476dee6498c tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837759', 'volume_id': '6276d4d3-d6a6-45b6-8615-af919c7d7278', 'name': 'volume-6276d4d3-d6a6-45b6-8615-af919c7d7278', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5108cc8a-2c5f-4f1a-b114-d932d0f3e11e', 'attached_at': '', 'detached_at': '', 'volume_id': '6276d4d3-d6a6-45b6-8615-af919c7d7278', 'serial': '6276d4d3-d6a6-45b6-8615-af919c7d7278'} {{(pid=69367) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1056.741593] env[69367]: DEBUG nova.objects.instance [None req-5c568764-0f16-43e5-8814-2476dee6498c tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lazy-loading 'flavor' on Instance uuid 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1057.248440] env[69367]: DEBUG oslo_concurrency.lockutils [None req-5c568764-0f16-43e5-8814-2476dee6498c tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.241s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.104788] env[69367]: DEBUG oslo_concurrency.lockutils [None req-490f0b91-8813-40f7-bed7-534bdd158573 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.105284] env[69367]: DEBUG oslo_concurrency.lockutils [None req-490f0b91-8813-40f7-bed7-534bdd158573 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1058.608916] env[69367]: DEBUG nova.compute.utils [None req-490f0b91-8813-40f7-bed7-534bdd158573 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1059.111863] env[69367]: DEBUG oslo_concurrency.lockutils [None req-490f0b91-8813-40f7-bed7-534bdd158573 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1060.166895] env[69367]: DEBUG oslo_concurrency.lockutils [None req-490f0b91-8813-40f7-bed7-534bdd158573 
tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1060.167247] env[69367]: DEBUG oslo_concurrency.lockutils [None req-490f0b91-8813-40f7-bed7-534bdd158573 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1060.167446] env[69367]: INFO nova.compute.manager [None req-490f0b91-8813-40f7-bed7-534bdd158573 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Attaching volume 7e310edc-2bcd-4f71-8654-1383f839449c to /dev/sdc [ 1060.197992] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c31a3659-3165-4f92-bd93-f533150e9d89 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.205359] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57eba72c-3d6e-47c7-b347-e66004d9c2e6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.219389] env[69367]: DEBUG nova.virt.block_device [None req-490f0b91-8813-40f7-bed7-534bdd158573 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Updating existing volume attachment record: 54e8a332-7112-4a5a-982d-03a33a2f26b5 {{(pid=69367) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1064.765656] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-490f0b91-8813-40f7-bed7-534bdd158573 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Volume attach. 
Driver type: vmdk {{(pid=69367) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1064.765908] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-490f0b91-8813-40f7-bed7-534bdd158573 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837760', 'volume_id': '7e310edc-2bcd-4f71-8654-1383f839449c', 'name': 'volume-7e310edc-2bcd-4f71-8654-1383f839449c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5108cc8a-2c5f-4f1a-b114-d932d0f3e11e', 'attached_at': '', 'detached_at': '', 'volume_id': '7e310edc-2bcd-4f71-8654-1383f839449c', 'serial': '7e310edc-2bcd-4f71-8654-1383f839449c'} {{(pid=69367) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1064.766846] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1afa773-745b-480a-b381-1d02cafc04b5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.785202] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da488ff-5927-4545-a472-e7b17871e7b5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.813443] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-490f0b91-8813-40f7-bed7-534bdd158573 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] volume-7e310edc-2bcd-4f71-8654-1383f839449c/volume-7e310edc-2bcd-4f71-8654-1383f839449c.vmdk or device None with type thin {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1064.813780] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aeb6436a-736d-48d3-a5ea-84a62411ea4f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.832286] env[69367]: DEBUG oslo_vmware.api [None req-490f0b91-8813-40f7-bed7-534bdd158573 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 1064.832286] env[69367]: value = "task-4234165" [ 1064.832286] env[69367]: _type = "Task" [ 1064.832286] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.840344] env[69367]: DEBUG oslo_vmware.api [None req-490f0b91-8813-40f7-bed7-534bdd158573 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234165, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.342315] env[69367]: DEBUG oslo_vmware.api [None req-490f0b91-8813-40f7-bed7-534bdd158573 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234165, 'name': ReconfigVM_Task, 'duration_secs': 0.378058} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.342599] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-490f0b91-8813-40f7-bed7-534bdd158573 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Reconfigured VM instance instance-0000006d to attach disk [datastore2] volume-7e310edc-2bcd-4f71-8654-1383f839449c/volume-7e310edc-2bcd-4f71-8654-1383f839449c.vmdk or device None with type thin {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1065.347858] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-16240b5b-4ef9-49da-820e-2df207863377 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.363774] env[69367]: DEBUG oslo_vmware.api [None req-490f0b91-8813-40f7-bed7-534bdd158573 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 1065.363774] env[69367]: value = "task-4234166" [ 1065.363774] env[69367]: _type = "Task" [ 1065.363774] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.375138] env[69367]: DEBUG oslo_vmware.api [None req-490f0b91-8813-40f7-bed7-534bdd158573 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234166, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.873900] env[69367]: DEBUG oslo_vmware.api [None req-490f0b91-8813-40f7-bed7-534bdd158573 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234166, 'name': ReconfigVM_Task, 'duration_secs': 0.140396} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.874231] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-490f0b91-8813-40f7-bed7-534bdd158573 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837760', 'volume_id': '7e310edc-2bcd-4f71-8654-1383f839449c', 'name': 'volume-7e310edc-2bcd-4f71-8654-1383f839449c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5108cc8a-2c5f-4f1a-b114-d932d0f3e11e', 'attached_at': '', 'detached_at': '', 'volume_id': '7e310edc-2bcd-4f71-8654-1383f839449c', 'serial': '7e310edc-2bcd-4f71-8654-1383f839449c'} {{(pid=69367) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1066.910439] env[69367]: DEBUG nova.objects.instance [None req-490f0b91-8813-40f7-bed7-534bdd158573 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lazy-loading 'flavor' on Instance uuid 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1067.416356] env[69367]: DEBUG oslo_concurrency.lockutils [None req-490f0b91-8813-40f7-bed7-534bdd158573 tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.249s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.709992] env[69367]: DEBUG oslo_concurrency.lockutils [None req-12cc4b94-d3b6-4fe5-9295-fa2d4fc62aee tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.710198] env[69367]: DEBUG oslo_concurrency.lockutils [None req-12cc4b94-d3b6-4fe5-9295-fa2d4fc62aee tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.213755] env[69367]: INFO nova.compute.manager [None req-12cc4b94-d3b6-4fe5-9295-fa2d4fc62aee tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Detaching volume 6276d4d3-d6a6-45b6-8615-af919c7d7278 [ 1068.244308] env[69367]: INFO nova.virt.block_device [None req-12cc4b94-d3b6-4fe5-9295-fa2d4fc62aee tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Attempting to driver detach volume 6276d4d3-d6a6-45b6-8615-af919c7d7278 from mountpoint /dev/sdb [ 1068.244569] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-12cc4b94-d3b6-4fe5-9295-fa2d4fc62aee tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Volume 
detach. Driver type: vmdk {{(pid=69367) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1068.244784] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-12cc4b94-d3b6-4fe5-9295-fa2d4fc62aee tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837759', 'volume_id': '6276d4d3-d6a6-45b6-8615-af919c7d7278', 'name': 'volume-6276d4d3-d6a6-45b6-8615-af919c7d7278', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5108cc8a-2c5f-4f1a-b114-d932d0f3e11e', 'attached_at': '', 'detached_at': '', 'volume_id': '6276d4d3-d6a6-45b6-8615-af919c7d7278', 'serial': '6276d4d3-d6a6-45b6-8615-af919c7d7278'} {{(pid=69367) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1068.245716] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37bdb9ff-66f9-4c39-a2ae-efea31f02a0f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.270771] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f48b8d39-82a2-4d72-aa4d-17c894bee295 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.278768] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-512b4ce6-9e03-4cad-82d1-644d4d55df8e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.301994] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55802460-8ea9-4c55-9bb2-bdecf41975ec {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.317846] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-12cc4b94-d3b6-4fe5-9295-fa2d4fc62aee tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] The volume has not been displaced from its original location: [datastore2] volume-6276d4d3-d6a6-45b6-8615-af919c7d7278/volume-6276d4d3-d6a6-45b6-8615-af919c7d7278.vmdk. No consolidation needed. 
{{(pid=69367) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1068.323254] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-12cc4b94-d3b6-4fe5-9295-fa2d4fc62aee tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Reconfiguring VM instance instance-0000006d to detach disk 2001 {{(pid=69367) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1068.323582] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b0b07ad-e00b-46ac-9fae-dc012bfeb851 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.341720] env[69367]: DEBUG oslo_vmware.api [None req-12cc4b94-d3b6-4fe5-9295-fa2d4fc62aee tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 1068.341720] env[69367]: value = "task-4234167" [ 1068.341720] env[69367]: _type = "Task" [ 1068.341720] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.349781] env[69367]: DEBUG oslo_vmware.api [None req-12cc4b94-d3b6-4fe5-9295-fa2d4fc62aee tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234167, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.851273] env[69367]: DEBUG oslo_vmware.api [None req-12cc4b94-d3b6-4fe5-9295-fa2d4fc62aee tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234167, 'name': ReconfigVM_Task, 'duration_secs': 0.237112} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.851469] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-12cc4b94-d3b6-4fe5-9295-fa2d4fc62aee tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Reconfigured VM instance instance-0000006d to detach disk 2001 {{(pid=69367) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1068.856106] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6acc935-1a81-4179-a4f9-f0e577655d81 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.871707] env[69367]: DEBUG oslo_vmware.api [None req-12cc4b94-d3b6-4fe5-9295-fa2d4fc62aee tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 1068.871707] env[69367]: value = "task-4234168" [ 1068.871707] env[69367]: _type = "Task" [ 1068.871707] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.880138] env[69367]: DEBUG oslo_vmware.api [None req-12cc4b94-d3b6-4fe5-9295-fa2d4fc62aee tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234168, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.382676] env[69367]: DEBUG oslo_vmware.api [None req-12cc4b94-d3b6-4fe5-9295-fa2d4fc62aee tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234168, 'name': ReconfigVM_Task, 'duration_secs': 0.141454} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.383106] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-12cc4b94-d3b6-4fe5-9295-fa2d4fc62aee tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837759', 'volume_id': '6276d4d3-d6a6-45b6-8615-af919c7d7278', 'name': 'volume-6276d4d3-d6a6-45b6-8615-af919c7d7278', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5108cc8a-2c5f-4f1a-b114-d932d0f3e11e', 'attached_at': '', 'detached_at': '', 'volume_id': '6276d4d3-d6a6-45b6-8615-af919c7d7278', 'serial': '6276d4d3-d6a6-45b6-8615-af919c7d7278'} {{(pid=69367) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1069.929942] env[69367]: DEBUG nova.objects.instance [None req-12cc4b94-d3b6-4fe5-9295-fa2d4fc62aee tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lazy-loading 'flavor' on Instance uuid 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1070.937868] env[69367]: DEBUG oslo_concurrency.lockutils [None req-12cc4b94-d3b6-4fe5-9295-fa2d4fc62aee tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.227s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.959820] env[69367]: DEBUG oslo_concurrency.lockutils [None req-00f9e33b-e8ff-4dd4-867c-684939d73b2b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.960193] env[69367]: DEBUG oslo_concurrency.lockutils [None req-00f9e33b-e8ff-4dd4-867c-684939d73b2b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.462718] env[69367]: INFO nova.compute.manager [None req-00f9e33b-e8ff-4dd4-867c-684939d73b2b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Detaching volume 7e310edc-2bcd-4f71-8654-1383f839449c [ 1071.493622] env[69367]: INFO nova.virt.block_device [None req-00f9e33b-e8ff-4dd4-867c-684939d73b2b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 
5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Attempting to driver detach volume 7e310edc-2bcd-4f71-8654-1383f839449c from mountpoint /dev/sdc [ 1071.493881] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-00f9e33b-e8ff-4dd4-867c-684939d73b2b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Volume detach. Driver type: vmdk {{(pid=69367) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1071.494086] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-00f9e33b-e8ff-4dd4-867c-684939d73b2b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837760', 'volume_id': '7e310edc-2bcd-4f71-8654-1383f839449c', 'name': 'volume-7e310edc-2bcd-4f71-8654-1383f839449c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5108cc8a-2c5f-4f1a-b114-d932d0f3e11e', 'attached_at': '', 'detached_at': '', 'volume_id': '7e310edc-2bcd-4f71-8654-1383f839449c', 'serial': '7e310edc-2bcd-4f71-8654-1383f839449c'} {{(pid=69367) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1071.494949] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64ee2bae-82ce-414d-bf09-c8cb41dc9fa5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.517039] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f879c1d-e359-42b9-9eb4-cdc16b9401cb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.524961] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab974480-5e1b-47e4-a2a1-de4c970397f2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.545632] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ea8069-6c38-4eb7-9340-5c2e5618aafd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.562359] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-00f9e33b-e8ff-4dd4-867c-684939d73b2b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] The volume has not been displaced from its original location: [datastore2] volume-7e310edc-2bcd-4f71-8654-1383f839449c/volume-7e310edc-2bcd-4f71-8654-1383f839449c.vmdk. No consolidation needed. 
{{(pid=69367) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1071.567501] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-00f9e33b-e8ff-4dd4-867c-684939d73b2b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Reconfiguring VM instance instance-0000006d to detach disk 2002 {{(pid=69367) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1071.567809] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54dbde7e-bd52-4ec9-a6a8-9fa436fedef2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.586972] env[69367]: DEBUG oslo_vmware.api [None req-00f9e33b-e8ff-4dd4-867c-684939d73b2b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 1071.586972] env[69367]: value = "task-4234169" [ 1071.586972] env[69367]: _type = "Task" [ 1071.586972] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.597282] env[69367]: DEBUG oslo_vmware.api [None req-00f9e33b-e8ff-4dd4-867c-684939d73b2b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234169, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.097236] env[69367]: DEBUG oslo_vmware.api [None req-00f9e33b-e8ff-4dd4-867c-684939d73b2b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234169, 'name': ReconfigVM_Task, 'duration_secs': 0.231404} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.097597] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-00f9e33b-e8ff-4dd4-867c-684939d73b2b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Reconfigured VM instance instance-0000006d to detach disk 2002 {{(pid=69367) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1072.102420] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98853319-66b5-4902-af37-6131fee7fb61 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.119343] env[69367]: DEBUG oslo_vmware.api [None req-00f9e33b-e8ff-4dd4-867c-684939d73b2b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 1072.119343] env[69367]: value = "task-4234170" [ 1072.119343] env[69367]: _type = "Task" [ 1072.119343] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.129926] env[69367]: DEBUG oslo_vmware.api [None req-00f9e33b-e8ff-4dd4-867c-684939d73b2b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234170, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.629949] env[69367]: DEBUG oslo_vmware.api [None req-00f9e33b-e8ff-4dd4-867c-684939d73b2b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234170, 'name': ReconfigVM_Task, 'duration_secs': 0.141742} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.630273] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-00f9e33b-e8ff-4dd4-867c-684939d73b2b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837760', 'volume_id': '7e310edc-2bcd-4f71-8654-1383f839449c', 'name': 'volume-7e310edc-2bcd-4f71-8654-1383f839449c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5108cc8a-2c5f-4f1a-b114-d932d0f3e11e', 'attached_at': '', 'detached_at': '', 'volume_id': '7e310edc-2bcd-4f71-8654-1383f839449c', 'serial': '7e310edc-2bcd-4f71-8654-1383f839449c'} {{(pid=69367) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1073.173778] env[69367]: DEBUG nova.objects.instance [None req-00f9e33b-e8ff-4dd4-867c-684939d73b2b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lazy-loading 'flavor' on Instance uuid 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1074.181229] env[69367]: DEBUG oslo_concurrency.lockutils [None req-00f9e33b-e8ff-4dd4-867c-684939d73b2b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.221s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.352278] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.352669] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.352728] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.352907] env[69367]: DEBUG 
oslo_concurrency.lockutils [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.353096] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.355405] env[69367]: INFO nova.compute.manager [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Terminating instance [ 1075.859500] env[69367]: DEBUG nova.compute.manager [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Start destroying the instance on the hypervisor. {{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1075.859687] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1075.860609] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-075c9fd4-07b9-4995-978b-4dfe6d99d147 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.869242] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1075.869504] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce78048a-919f-4212-91b7-5a4affd80ece {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.877354] env[69367]: DEBUG oslo_vmware.api [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 1075.877354] env[69367]: value = "task-4234171" [ 1075.877354] env[69367]: _type = "Task" [ 1075.877354] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.887094] env[69367]: DEBUG oslo_vmware.api [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234171, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.388173] env[69367]: DEBUG oslo_vmware.api [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234171, 'name': PowerOffVM_Task, 'duration_secs': 0.202941} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.388574] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1076.388574] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1076.388834] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-14866dd9-3a31-4fbe-b591-a597c124c548 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.458186] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1076.458439] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1076.458583] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Deleting the datastore file [datastore2] 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1076.458864] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-910731d1-6fae-4d13-94fc-2a65a9e962c7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.466422] env[69367]: DEBUG oslo_vmware.api [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for the task: (returnval){ [ 1076.466422] env[69367]: value = "task-4234173" [ 1076.466422] env[69367]: _type = "Task" [ 1076.466422] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.475114] env[69367]: DEBUG oslo_vmware.api [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234173, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.977983] env[69367]: DEBUG oslo_vmware.api [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Task: {'id': task-4234173, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141679} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.978283] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1076.978476] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1076.978657] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1076.978838] env[69367]: INFO nova.compute.manager [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1076.979092] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1076.979298] env[69367]: DEBUG nova.compute.manager [-] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1076.979396] env[69367]: DEBUG nova.network.neutron [-] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1077.441993] env[69367]: DEBUG nova.compute.manager [req-ff50ffc1-54ec-436e-a19e-c8dccabcc650 req-279e3f28-7be8-4409-bdbb-07f818434c12 service nova] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Received event network-vif-deleted-eaedef65-ee11-437f-981f-bde5612a3c8f {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 1077.442332] env[69367]: INFO nova.compute.manager [req-ff50ffc1-54ec-436e-a19e-c8dccabcc650 req-279e3f28-7be8-4409-bdbb-07f818434c12 service nova] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Neutron deleted interface eaedef65-ee11-437f-981f-bde5612a3c8f; detaching it from the instance and deleting it from the info cache [ 1077.442650] env[69367]: DEBUG nova.network.neutron [req-ff50ffc1-54ec-436e-a19e-c8dccabcc650 req-279e3f28-7be8-4409-bdbb-07f818434c12 service nova] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.926031] env[69367]: DEBUG nova.network.neutron [-] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.946078] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9e7c323c-5ec2-4b8f-9e21-8dbb30ff24a8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.956291] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa64c53d-c006-42e2-af35-e88d3c224ad7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.983868] env[69367]: DEBUG nova.compute.manager [req-ff50ffc1-54ec-436e-a19e-c8dccabcc650 req-279e3f28-7be8-4409-bdbb-07f818434c12 service nova] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Detach interface failed, port_id=eaedef65-ee11-437f-981f-bde5612a3c8f, reason: Instance 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e could not be found. {{(pid=69367) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11601}} [ 1078.429124] env[69367]: INFO nova.compute.manager [-] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Took 1.45 seconds to deallocate network for instance. 
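The sequence above is the vmwareapi driver's detach/terminate path as it looks from the log: each ReconfigVM_Task, PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task call is issued through oslo.vmware and then polled until the vCenter task completes. As a rough, hypothetical illustration of that request-and-poll pattern (a sketch against oslo.vmware directly, not Nova's actual code), where the host, credentials, vm_ref and device objects are placeholders:

```python
# Hypothetical sketch of the ReconfigVM_Task / wait_for_task pattern seen in
# the log above. HOST/USER/PASSWORD, vm_ref and device are placeholders, not
# values taken from this log or from Nova's source.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vcenter.example.com', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

def detach_disk(session, vm_ref, device):
    """Issue ReconfigVM_Task to remove one virtual disk and block until done."""
    client_factory = session.vim.client.factory
    config_spec = client_factory.create('ns0:VirtualMachineConfigSpec')
    device_change = client_factory.create('ns0:VirtualDeviceConfigSpec')
    device_change.operation = 'remove'       # detach the disk, keep its backing file
    device_change.device = device
    config_spec.deviceChange = [device_change]
    # invoke_api() sends the SOAP call; wait_for_task() polls the returned
    # Task object (the "task-42341xx" values in the log) until it finishes.
    task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                              vm_ref, spec=config_spec)
    return session.wait_for_task(task)
```

The "Waiting for the task" / "progress is N%" / "completed successfully" lines above are emitted by that wait_for_task polling loop, with the poll interval controlling how often the task status is refreshed.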
[ 1078.936455] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.936847] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.936847] env[69367]: DEBUG nova.objects.instance [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lazy-loading 'resources' on Instance uuid 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1079.668784] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bad6828-ec21-42b3-bbe9-8d4577b562dd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.680780] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d64c6a8-7cb3-4efb-8a66-2981be2c369e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.735441] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-605dd084-e137-4624-87ec-e9112ef39bb1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.747552] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b07bf831-6e83-41e8-b9fe-c3ccd3542e6a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.770998] env[69367]: DEBUG nova.compute.provider_tree [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1080.274961] env[69367]: DEBUG nova.scheduler.client.report [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1080.780369] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 
tempest-AttachVolumeTestJSON-513705930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.844s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1080.800259] env[69367]: INFO nova.scheduler.client.report [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Deleted allocations for instance 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e [ 1081.308477] env[69367]: DEBUG oslo_concurrency.lockutils [None req-0d930da0-0210-482d-8a98-5673c52f7a1b tempest-AttachVolumeTestJSON-513705930 tempest-AttachVolumeTestJSON-513705930-project-member] Lock "5108cc8a-2c5f-4f1a-b114-d932d0f3e11e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.956s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.559401] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.559823] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.559823] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.559962] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.560098] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.560250] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.560405] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.560552] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69367) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11183}} [ 1098.560702] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1099.064482] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1099.064752] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1099.064920] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1099.065098] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69367) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1099.066062] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc41ead4-54b1-4621-8abc-1ea71720bd23 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.075211] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d286f121-d45a-4e18-9a10-a0ebc3907202 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.092461] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68fc568a-f245-4064-9b55-c6565f1d0e29 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.099909] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-140ca500-0fea-4f0b-bc52-c562ed4f005f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.129017] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180217MB free_disk=1GB free_vcpus=48 pci_devices=None {{(pid=69367) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1099.129253] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
1099.129415] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1100.165752] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance a358ce6d-9826-4ddb-8c2f-51bac8db59d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1100.166071] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 4e346ed1-36e9-421d-975f-e8bb6f05c0a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1100.166071] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance e1c7d100-4ad7-4871-970f-bb7562bfc6fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1100.166189] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1100.166279] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance ab365570-ac29-4094-be4c-d49563a465c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1100.166393] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance c17525ee-d038-4c81-932b-ed74a6de6cb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1100.166509] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 92c27615-d377-492f-a9db-ff45b2e71537 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1100.166622] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1100.166733] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 011ab7de-98a7-41fc-9e05-e71965c73c09 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1100.166842] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance f8c07fa1-d27c-4d0f-847b-481477cd04bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1100.166951] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 7f937d89-684b-44f5-9f30-783aeafe99d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1100.167069] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance ab9d8e3e-65c5-4ac9-920f-3042b8cf2054 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1100.167180] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance c272b0ae-6313-46ab-977c-6de255e77675 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1100.167288] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance d900df05-b65c-4a45-94d1-563afbf9c022 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1100.167396] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 05aae150-5d86-4210-ae7e-8c63e83cb907 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1100.167507] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 42db60d9-e5f7-4925-8f6f-d3884687414a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1100.167714] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1100.167851] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3584MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1100.366963] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4db39ea-9b55-44b6-8488-a5d0e5bdbe38 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.375545] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c539356-ca5e-4e28-bdbf-bff31f16ddc1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.407070] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e69264-bfca-4254-9ed1-8b65f9339f7d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.415505] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-661a8208-8681-4fa9-bf90-57f8484d0847 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.429884] env[69367]: DEBUG nova.compute.provider_tree [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1100.933921] env[69367]: DEBUG nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1101.440504] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69367) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1101.440959] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.311s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.088452] env[69367]: DEBUG oslo_service.periodic_task [None 
req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1147.088863] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Cleaning up deleted instances {{(pid=69367) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11864}} [ 1147.594645] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] There are 11 instances to clean {{(pid=69367) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11873}} [ 1147.594904] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] [instance: 5108cc8a-2c5f-4f1a-b114-d932d0f3e11e] Instance has had 0 of 5 cleanup attempts {{(pid=69367) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11877}} [ 1148.099042] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] [instance: f7d6c2a8-a5ed-4c91-aef4-58d3ecfd10b3] Instance has had 0 of 5 cleanup attempts {{(pid=69367) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11877}} [ 1148.602829] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] [instance: 652e2e23-7927-46ce-b8af-fffdb6ac8a3e] Instance has had 0 of 5 cleanup attempts {{(pid=69367) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11877}} [ 1149.106494] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] [instance: 54a1f586-481d-427e-ba0b-be90e5573bd3] Instance has had 0 of 5 cleanup attempts {{(pid=69367) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11877}} [ 1149.610423] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] [instance: 46b6bc45-57f0-4850-9249-6bbb22b162c6] Instance has had 0 of 5 cleanup attempts {{(pid=69367) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11877}} [ 1150.115069] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] [instance: 4a46d003-f57e-4089-aa60-757a4246f071] Instance has had 0 of 5 cleanup attempts {{(pid=69367) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11877}} [ 1150.618772] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] [instance: 95efcff3-a81b-49fb-b85a-dae060c023b2] Instance has had 0 of 5 cleanup attempts {{(pid=69367) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11877}} [ 1151.122694] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] [instance: 48470f96-56d2-4ca2-8078-c5ff4f6db71b] Instance has had 0 of 5 cleanup attempts {{(pid=69367) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11877}} [ 1151.625721] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] [instance: 557dc011-44a1-4240-9596-d055d57e176f] Instance has had 0 of 5 cleanup attempts {{(pid=69367) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11877}} [ 1152.129084] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] [instance: 837b4093-308b-440b-940d-fc0227a5c590] Instance has had 0 of 5 cleanup attempts {{(pid=69367) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11877}} [ 1152.632548] 
env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] [instance: d2f8328d-fd05-4e63-9cbd-a6e3ec948964] Instance has had 0 of 5 cleanup attempts {{(pid=69367) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11877}} [ 1153.136343] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1153.136624] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Cleaning up deleted instances with incomplete migration {{(pid=69367) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11902}} [ 1153.639626] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1157.142077] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1157.142077] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1157.649673] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1157.649885] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1157.650013] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1157.650176] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1157.650342] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1157.650501] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1158.154122] 
env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1158.154522] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1158.154679] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.154756] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69367) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1158.155672] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd0bdc59-5b28-486d-a16f-09d51aed40ee {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.164273] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eaffc55-f150-436c-8a60-ab399d3978a4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.178504] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1311af8c-ed3e-49d1-859d-04782e312f1b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.185328] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-001e900f-13df-47e7-b4d2-b6ccd892eb02 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.213765] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180344MB free_disk=1GB free_vcpus=48 pci_devices=None {{(pid=69367) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1158.213953] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1158.214155] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1159.391814] env[69367]: DEBUG 
nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance a358ce6d-9826-4ddb-8c2f-51bac8db59d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1159.392132] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 4e346ed1-36e9-421d-975f-e8bb6f05c0a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1159.392199] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance e1c7d100-4ad7-4871-970f-bb7562bfc6fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1159.392297] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1159.392417] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance ab365570-ac29-4094-be4c-d49563a465c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1159.392535] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance c17525ee-d038-4c81-932b-ed74a6de6cb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1159.392652] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 92c27615-d377-492f-a9db-ff45b2e71537 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1159.392767] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1159.392881] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 011ab7de-98a7-41fc-9e05-e71965c73c09 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1159.392995] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance f8c07fa1-d27c-4d0f-847b-481477cd04bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1159.393122] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 7f937d89-684b-44f5-9f30-783aeafe99d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1159.393233] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance ab9d8e3e-65c5-4ac9-920f-3042b8cf2054 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1159.393343] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance c272b0ae-6313-46ab-977c-6de255e77675 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1159.393452] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance d900df05-b65c-4a45-94d1-563afbf9c022 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1159.393598] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 05aae150-5d86-4210-ae7e-8c63e83cb907 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1159.393752] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 42db60d9-e5f7-4925-8f6f-d3884687414a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1159.394012] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1159.394166] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3584MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1159.569968] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57ceb997-b4f9-44c2-91ff-e2d77bce230c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.578117] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f385c66-9a52-477b-be09-faece3cef195 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.608761] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34727b71-318b-431e-a756-97349bf735d5 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.616265] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fb9fea3-89d3-4949-9be5-56994e53ee33 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.629426] env[69367]: DEBUG nova.compute.provider_tree [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1160.132962] env[69367]: DEBUG nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1160.134273] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69367) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1160.134460] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.920s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1160.571293] env[69367]: DEBUG oslo_service.periodic_task [None 
req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1160.571646] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69367) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11183}} [ 1193.131558] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._sync_power_states {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1193.637467] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Getting list of instances from cluster (obj){ [ 1193.637467] env[69367]: value = "domain-c8" [ 1193.637467] env[69367]: _type = "ClusterComputeResource" [ 1193.637467] env[69367]: } {{(pid=69367) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1193.638513] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-776a0dac-4151-4cf2-88d2-d2d24730fa54 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.648942] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Got total of 1 instances {{(pid=69367) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1193.649109] env[69367]: WARNING nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] While synchronizing instance power states, found 16 instances in the database and 1 instances on the hypervisor. 
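(Editor's note) The warning above is produced by the _sync_power_states periodic task: the database still lists 16 instances for this host while the VMware driver reports only one VM in cluster domain-c8, so the task then triggers a per-UUID sync, each under its own lock, as the entries that follow show. The snippet below is a minimal illustrative sketch of that comparison only, not Nova's actual implementation; the Instance dataclass and the helper names are placeholders introduced here for illustration.

# Sketch (assumed names, not Nova code) of the DB-vs-hypervisor comparison
# behind the "found 16 instances in the database and 1 instances on the
# hypervisor" warning, followed by a per-UUID sync pass.
from dataclasses import dataclass


@dataclass
class Instance:
    uuid: str
    power_state: str  # e.g. "running" or "shutdown"


def sync_power_states(db_instances, hypervisor_uuids):
    """Compare instances the DB assigns to this host against VMs the driver sees."""
    num_db = len(db_instances)
    num_vm = len(hypervisor_uuids)
    if num_db != num_vm:
        # Mirrors the WARNING emitted in the log above when the counts diverge.
        print(f"WARNING: found {num_db} instances in the database and "
              f"{num_vm} instances on the hypervisor.")
    for inst in db_instances:
        # The real service does this per UUID under a dedicated lock; here we
        # only report whether the hypervisor still knows about the VM.
        present = inst.uuid in hypervisor_uuids
        print(f"Triggering sync for uuid {inst.uuid} (on hypervisor: {present})")


if __name__ == "__main__":
    db = [Instance(uuid=f"uuid-{i:02d}", power_state="running") for i in range(16)]
    hypervisor = {"uuid-00"}  # only one VM actually visible to the driver
    sync_power_states(db, hypervisor)
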
[ 1193.649249] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Triggering sync for uuid a358ce6d-9826-4ddb-8c2f-51bac8db59d4 {{(pid=69367) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1193.649532] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Triggering sync for uuid 4e346ed1-36e9-421d-975f-e8bb6f05c0a0 {{(pid=69367) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1193.649755] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Triggering sync for uuid e1c7d100-4ad7-4871-970f-bb7562bfc6fc {{(pid=69367) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1193.649955] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Triggering sync for uuid 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9 {{(pid=69367) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1193.650171] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Triggering sync for uuid ab365570-ac29-4094-be4c-d49563a465c8 {{(pid=69367) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1193.650365] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Triggering sync for uuid c17525ee-d038-4c81-932b-ed74a6de6cb5 {{(pid=69367) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1193.650560] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Triggering sync for uuid 92c27615-d377-492f-a9db-ff45b2e71537 {{(pid=69367) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1193.650917] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Triggering sync for uuid 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57 {{(pid=69367) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1193.651132] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Triggering sync for uuid 011ab7de-98a7-41fc-9e05-e71965c73c09 {{(pid=69367) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1193.651331] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Triggering sync for uuid f8c07fa1-d27c-4d0f-847b-481477cd04bf {{(pid=69367) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1193.651562] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Triggering sync for uuid 7f937d89-684b-44f5-9f30-783aeafe99d1 {{(pid=69367) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1193.651766] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Triggering sync for uuid ab9d8e3e-65c5-4ac9-920f-3042b8cf2054 {{(pid=69367) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1193.651957] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Triggering sync for uuid c272b0ae-6313-46ab-977c-6de255e77675 {{(pid=69367) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1193.652165] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Triggering sync for uuid d900df05-b65c-4a45-94d1-563afbf9c022 {{(pid=69367) _sync_power_states 
/opt/stack/nova/nova/compute/manager.py:10975}} [ 1193.652357] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Triggering sync for uuid 05aae150-5d86-4210-ae7e-8c63e83cb907 {{(pid=69367) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1193.652544] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Triggering sync for uuid 42db60d9-e5f7-4925-8f6f-d3884687414a {{(pid=69367) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1193.652894] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "a358ce6d-9826-4ddb-8c2f-51bac8db59d4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.653121] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "a358ce6d-9826-4ddb-8c2f-51bac8db59d4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.653394] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "4e346ed1-36e9-421d-975f-e8bb6f05c0a0" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.653584] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "4e346ed1-36e9-421d-975f-e8bb6f05c0a0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.653820] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "e1c7d100-4ad7-4871-970f-bb7562bfc6fc" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.654007] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "e1c7d100-4ad7-4871-970f-bb7562bfc6fc" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.654324] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.654435] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 
0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.654665] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "ab365570-ac29-4094-be4c-d49563a465c8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.654846] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "ab365570-ac29-4094-be4c-d49563a465c8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.655092] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "c17525ee-d038-4c81-932b-ed74a6de6cb5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.655276] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "c17525ee-d038-4c81-932b-ed74a6de6cb5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.655532] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "92c27615-d377-492f-a9db-ff45b2e71537" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.655720] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "92c27615-d377-492f-a9db-ff45b2e71537" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.656090] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.656279] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.656530] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "011ab7de-98a7-41fc-9e05-e71965c73c09" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69367) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.656727] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "011ab7de-98a7-41fc-9e05-e71965c73c09" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.656970] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "f8c07fa1-d27c-4d0f-847b-481477cd04bf" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.657165] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "f8c07fa1-d27c-4d0f-847b-481477cd04bf" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.657688] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "7f937d89-684b-44f5-9f30-783aeafe99d1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.657891] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "7f937d89-684b-44f5-9f30-783aeafe99d1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.658160] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "ab9d8e3e-65c5-4ac9-920f-3042b8cf2054" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.658345] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "ab9d8e3e-65c5-4ac9-920f-3042b8cf2054" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.658598] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "c272b0ae-6313-46ab-977c-6de255e77675" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.658783] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "c272b0ae-6313-46ab-977c-6de255e77675" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69367) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.659111] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "d900df05-b65c-4a45-94d1-563afbf9c022" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.659300] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "d900df05-b65c-4a45-94d1-563afbf9c022" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.659533] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "05aae150-5d86-4210-ae7e-8c63e83cb907" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.659723] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "05aae150-5d86-4210-ae7e-8c63e83cb907" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.659966] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "42db60d9-e5f7-4925-8f6f-d3884687414a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.660159] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "42db60d9-e5f7-4925-8f6f-d3884687414a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.660482] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-62b93459-af08-4bd8-bf1f-33057ea7aa3f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.662349] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-be137d83-2f23-46c2-8138-d72742d6da02 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.663871] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-94824e6f-3793-440f-ac1d-5cb668c7f605 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.666436] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-676f8acf-7328-499a-bce3-1a9acc8d7667 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.667882] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with 
opID=oslo.vmware-c5f7003d-766e-4c92-9935-38bcd2737bb0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.669462] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5c9dff70-de37-478e-bde4-17ce3d34ef71 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.671576] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bc6cecef-e848-4a1b-b46a-49d2c69194a3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.673548] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f59a50ae-e4d4-4e67-9b8b-efd0bb36608f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.675290] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0dd0603b-65e8-415e-a34c-0ca499907af4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.677161] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6318ffa2-219e-4aa2-b581-1ccc9668cd62 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.678926] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b45360e7-3f1a-4060-89c9-066cdfb43912 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.680774] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-74b71e0e-9c46-43a8-8309-190d166cb6f6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.682646] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2fecb1cc-2fe0-49b0-a264-01343b53103f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.684568] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9660bac2-d401-4ad2-8551-c016592755a8 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.686327] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-37c79a3b-a0d2-4d75-a6ec-7c8b59767b98 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.688694] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71085d53-39d1-4daa-9a4d-69fecaa55a9e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.725606] env[69367]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1193.726853] env[69367]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. 
Connection pool size: 10: queue.Full [ 1193.730276] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d164392e-8623-4afa-894e-b7b272963e3d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.743202] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2363ab9b-b301-4cda-ab20-23f396e6a7f1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.755503] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-675e0a1b-652b-494a-8469-ddc70ce87e36 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.767697] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d8d713c-51bf-4371-a7b9-f1d6bef950f3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.780615] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973a21d0-bff7-4a20-afdc-0c7334b6b6ab {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.792750] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e7da41-5c2e-4e46-b602-999aa6cd353a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.808522] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7748aff-1d98-45ec-91f1-cadf105b0f5b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.820787] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25837794-4745-4fb5-9451-81746768d29f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.832898] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-031fd97f-8857-49be-adc3-adab57a0f491 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.845036] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ffce49-6a88-4839-8f90-08742e26221f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.857082] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7778fe-547c-448b-8462-d0ec092ac119 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.870142] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e92316d-554e-486b-9c1b-6c58e8d2d953 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.885469] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4252be8c-6c10-40d6-8885-6fe6f750c254 {{(pid=69367) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.898193] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dccc259b-6a71-4756-858b-355807f58567 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.912472] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d05fcb-d7f0-4b54-bc93-a80f1edc47b2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.943974] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "42db60d9-e5f7-4925-8f6f-d3884687414a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.284s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.694397] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "4e346ed1-36e9-421d-975f-e8bb6f05c0a0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.041s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.694776] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "e1c7d100-4ad7-4871-970f-bb7562bfc6fc" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.041s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.695078] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "a358ce6d-9826-4ddb-8c2f-51bac8db59d4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.042s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.707989] env[69367]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1194.713386] env[69367]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1194.718385] env[69367]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1194.723612] env[69367]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1194.738333] env[69367]: WARNING oslo_messaging._drivers.amqpdriver [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. 
Increasing threshold to: 20 [ 1194.739366] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "7f937d89-684b-44f5-9f30-783aeafe99d1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.081s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.750696] env[69367]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1195.224396] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "f8c07fa1-d27c-4d0f-847b-481477cd04bf" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.567s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.241221] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "d900df05-b65c-4a45-94d1-563afbf9c022" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.582s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.241667] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "92c27615-d377-492f-a9db-ff45b2e71537" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.586s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.251239] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "011ab7de-98a7-41fc-9e05-e71965c73c09" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.594s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.256789] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "ab365570-ac29-4094-be4c-d49563a465c8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.602s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.258282] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.604s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.259740] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "ab9d8e3e-65c5-4ac9-920f-3042b8cf2054" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.601s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.260091] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57" "released" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.604s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.261545] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "05aae150-5d86-4210-ae7e-8c63e83cb907" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.602s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.261880] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "c272b0ae-6313-46ab-977c-6de255e77675" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.603s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.262266] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "c17525ee-d038-4c81-932b-ed74a6de6cb5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.607s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1211.616244] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1212.082634] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.087216] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.087667] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1214.087967] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1216.086619] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1216.086904] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1216.591841] env[69367]: DEBUG 
oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1216.592161] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1216.592352] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1216.592512] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69367) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1216.593434] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f867712b-07aa-4c44-8d37-e041bbcc5793 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.602497] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-173676b8-c460-4d58-956c-f38c29468e10 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.616821] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f5012ff-f5e2-4ec5-a79b-00a704b21417 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.623576] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4779c7fe-5eb9-4c12-af95-2a1d663bcff6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.651874] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180336MB free_disk=1GB free_vcpus=48 pci_devices=None {{(pid=69367) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1216.652060] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1216.652246] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1217.681093] env[69367]: DEBUG nova.compute.resource_tracker [None 
req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance a358ce6d-9826-4ddb-8c2f-51bac8db59d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1217.681369] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 4e346ed1-36e9-421d-975f-e8bb6f05c0a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1217.681369] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance e1c7d100-4ad7-4871-970f-bb7562bfc6fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1217.681492] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1217.681608] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance ab365570-ac29-4094-be4c-d49563a465c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1217.681723] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance c17525ee-d038-4c81-932b-ed74a6de6cb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1217.681835] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 92c27615-d377-492f-a9db-ff45b2e71537 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1217.681945] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1217.682068] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 011ab7de-98a7-41fc-9e05-e71965c73c09 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1217.682182] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance f8c07fa1-d27c-4d0f-847b-481477cd04bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1217.682292] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 7f937d89-684b-44f5-9f30-783aeafe99d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1217.682401] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance ab9d8e3e-65c5-4ac9-920f-3042b8cf2054 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1217.682510] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance c272b0ae-6313-46ab-977c-6de255e77675 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1217.682620] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance d900df05-b65c-4a45-94d1-563afbf9c022 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1217.682730] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 05aae150-5d86-4210-ae7e-8c63e83cb907 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1217.682908] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 42db60d9-e5f7-4925-8f6f-d3884687414a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1217.683079] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1217.683215] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3584MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1217.864347] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec38f3e0-6472-4f63-ae79-b4ee1a3a662b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.872303] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab6638d-97ef-4e7c-b440-fa0a1578aba7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.903360] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-037d91b3-0377-4511-9e74-e53475ea03db {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.911540] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ee4ae3f-3f39-46b3-b37a-63127d44d2e2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.924917] env[69367]: DEBUG nova.compute.provider_tree [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1218.428776] env[69367]: DEBUG nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1218.430116] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69367) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1218.430305] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.778s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1221.431452] env[69367]: DEBUG oslo_service.periodic_task [None 
req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1221.431841] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69367) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11183}} [ 1271.088922] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1274.081885] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1274.086683] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1274.086889] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1274.087064] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1276.082685] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1276.592082] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1278.086531] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1278.589958] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1278.590258] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69367) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1278.590432] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1278.590614] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69367) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1278.591554] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5d0ab6f-28bf-4d29-b3af-a94cf8036342 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.600910] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee5c51c3-ce90-4c0e-856b-480930ad7cd1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.615122] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fac1722c-3c0f-459c-92be-2cb5dd1c61d1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.621660] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-258096cc-f7fc-489b-b31b-d72e9f535c3e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.649646] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180343MB free_disk=1GB free_vcpus=48 pci_devices=None {{(pid=69367) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1278.649796] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1278.650013] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1279.680595] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance a358ce6d-9826-4ddb-8c2f-51bac8db59d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1279.680933] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 4e346ed1-36e9-421d-975f-e8bb6f05c0a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1279.680933] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance e1c7d100-4ad7-4871-970f-bb7562bfc6fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1279.681069] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1279.681155] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance ab365570-ac29-4094-be4c-d49563a465c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1279.681273] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance c17525ee-d038-4c81-932b-ed74a6de6cb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1279.681391] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 92c27615-d377-492f-a9db-ff45b2e71537 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1279.681506] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1279.681617] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 011ab7de-98a7-41fc-9e05-e71965c73c09 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1279.681728] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance f8c07fa1-d27c-4d0f-847b-481477cd04bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1279.681839] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 7f937d89-684b-44f5-9f30-783aeafe99d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1279.681949] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance ab9d8e3e-65c5-4ac9-920f-3042b8cf2054 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1279.682074] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance c272b0ae-6313-46ab-977c-6de255e77675 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1279.682189] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance d900df05-b65c-4a45-94d1-563afbf9c022 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1279.682301] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 05aae150-5d86-4210-ae7e-8c63e83cb907 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1279.682413] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 42db60d9-e5f7-4925-8f6f-d3884687414a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1279.682622] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1279.682759] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3584MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1279.856478] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dab6ede-dccb-42d7-9876-b27cb55d9b1a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.864067] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e85e81-637e-4a7a-9183-c8e39918e7e1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.893313] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e5832f9-50fa-4631-ab62-9db25ca922ba {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.900580] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64599d61-92ea-4b22-afae-a76950aa7bd6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.913537] env[69367]: DEBUG nova.compute.provider_tree [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1280.416796] env[69367]: DEBUG nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1280.418147] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69367) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1280.418352] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.768s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1283.420642] env[69367]: DEBUG oslo_service.periodic_task [None 
req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1283.421079] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69367) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11183}} [ 1333.087723] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1334.500170] env[69367]: DEBUG oslo_concurrency.lockutils [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquiring lock "96c3ea0d-8912-4877-b834-3ea3ee904b80" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1334.500472] env[69367]: DEBUG oslo_concurrency.lockutils [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "96c3ea0d-8912-4877-b834-3ea3ee904b80" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1335.003256] env[69367]: DEBUG nova.compute.manager [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Starting instance...
{{(pid=69367) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1335.086454] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1335.086728] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1335.522719] env[69367]: DEBUG oslo_concurrency.lockutils [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1335.523033] env[69367]: DEBUG oslo_concurrency.lockutils [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1335.524594] env[69367]: INFO nova.compute.claims [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1336.081699] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1336.086461] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1336.548501] env[69367]: DEBUG nova.scheduler.client.report [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1336.561309] env[69367]: DEBUG nova.scheduler.client.report [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) 
_refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1336.561571] env[69367]: DEBUG nova.compute.provider_tree [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1336.572587] env[69367]: DEBUG nova.scheduler.client.report [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1336.590022] env[69367]: DEBUG nova.scheduler.client.report [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1336.769968] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-963e1d98-186b-4d73-b9ec-75f789d4352f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.777373] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ffde4b-1a59-4b3b-bb2c-cc2b0cb7757b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.807610] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2362842-ddef-4b27-a589-29ddb3df4ccd {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.814725] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d726be8-7af4-4014-84ed-f0075b4b653d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.827741] env[69367]: DEBUG nova.compute.provider_tree [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1337.087155] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69367) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1337.331337] env[69367]: DEBUG nova.scheduler.client.report [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1337.836676] env[69367]: DEBUG oslo_concurrency.lockutils [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.313s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1337.837417] env[69367]: DEBUG nova.compute.manager [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Start building networks asynchronously for instance. {{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1338.342874] env[69367]: DEBUG nova.compute.utils [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Using /dev/sd instead of None {{(pid=69367) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1338.344312] env[69367]: DEBUG nova.compute.manager [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Allocating IP information in the background. 
{{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1338.344509] env[69367]: DEBUG nova.network.neutron [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] allocate_for_instance() {{(pid=69367) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1338.420182] env[69367]: DEBUG nova.policy [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cd6690f13e33403c982f7ea1d4ead519', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '26a89ab4163e4b9a801dcbf11c953cf3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69367) authorize /opt/stack/nova/nova/policy.py:192}} [ 1338.700491] env[69367]: DEBUG nova.network.neutron [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Successfully created port: 81716a44-ac7c-4a0a-92b5-533f7e8af4fd {{(pid=69367) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1338.848506] env[69367]: DEBUG nova.compute.manager [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Start building block device mappings for instance. 
{{(pid=69367) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1339.087071] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1339.590655] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1339.590918] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1339.591116] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1339.591306] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69367) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1339.592265] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fdacaab-ab9f-4c3f-921d-b1ead953b665 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.601432] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b93e9a50-a62e-4e42-ab72-ae452e47e10e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.617605] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d913aee-127b-467f-a0fa-134267710926 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.625288] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-852a52b5-c4b7-42ad-acb8-01f78eee0079 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.654974] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180347MB free_disk=1GB free_vcpus=48 pci_devices=None {{(pid=69367) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1339.655194] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1339.655390] 
env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1339.859846] env[69367]: DEBUG nova.compute.manager [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Start spawning the instance on the hypervisor. {{(pid=69367) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1339.884682] env[69367]: DEBUG nova.virt.hardware [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T12:49:28Z,direct_url=,disk_format='vmdk',id=2b099420-9152-4d93-9609-4c9317824c11,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cd7c200d5cd6461fb951580f8c764c42',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T12:49:29Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1339.884981] env[69367]: DEBUG nova.virt.hardware [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1339.885163] env[69367]: DEBUG nova.virt.hardware [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1339.885351] env[69367]: DEBUG nova.virt.hardware [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1339.885531] env[69367]: DEBUG nova.virt.hardware [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1339.885779] env[69367]: DEBUG nova.virt.hardware [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1339.886083] env[69367]: DEBUG nova.virt.hardware [None req-71920fad-e386-45af-b24b-3144d4cdd21a 
tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1339.886296] env[69367]: DEBUG nova.virt.hardware [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1339.886490] env[69367]: DEBUG nova.virt.hardware [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1339.886664] env[69367]: DEBUG nova.virt.hardware [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1339.886855] env[69367]: DEBUG nova.virt.hardware [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1339.887700] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1b64707-dfc0-47b8-8378-743991d0949f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.896623] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad05427-04cf-4320-913a-6562d8f8469f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.101845] env[69367]: DEBUG nova.compute.manager [req-2a3c0c6c-b9b3-46fd-8de9-586bbb2866c1 req-5ddadf01-a054-48e3-b277-a9b303d9c6a0 service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Received event network-vif-plugged-81716a44-ac7c-4a0a-92b5-533f7e8af4fd {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 1340.102095] env[69367]: DEBUG oslo_concurrency.lockutils [req-2a3c0c6c-b9b3-46fd-8de9-586bbb2866c1 req-5ddadf01-a054-48e3-b277-a9b303d9c6a0 service nova] Acquiring lock "96c3ea0d-8912-4877-b834-3ea3ee904b80-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1340.102322] env[69367]: DEBUG oslo_concurrency.lockutils [req-2a3c0c6c-b9b3-46fd-8de9-586bbb2866c1 req-5ddadf01-a054-48e3-b277-a9b303d9c6a0 service nova] Lock "96c3ea0d-8912-4877-b834-3ea3ee904b80-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1340.102522] env[69367]: DEBUG oslo_concurrency.lockutils [req-2a3c0c6c-b9b3-46fd-8de9-586bbb2866c1 req-5ddadf01-a054-48e3-b277-a9b303d9c6a0
service nova] Lock "96c3ea0d-8912-4877-b834-3ea3ee904b80-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1340.103093] env[69367]: DEBUG nova.compute.manager [req-2a3c0c6c-b9b3-46fd-8de9-586bbb2866c1 req-5ddadf01-a054-48e3-b277-a9b303d9c6a0 service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] No waiting events found dispatching network-vif-plugged-81716a44-ac7c-4a0a-92b5-533f7e8af4fd {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1340.103315] env[69367]: WARNING nova.compute.manager [req-2a3c0c6c-b9b3-46fd-8de9-586bbb2866c1 req-5ddadf01-a054-48e3-b277-a9b303d9c6a0 service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Received unexpected event network-vif-plugged-81716a44-ac7c-4a0a-92b5-533f7e8af4fd for instance with vm_state building and task_state spawning. [ 1340.157789] env[69367]: DEBUG nova.network.neutron [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Successfully updated port: 81716a44-ac7c-4a0a-92b5-533f7e8af4fd {{(pid=69367) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1340.669548] env[69367]: DEBUG oslo_concurrency.lockutils [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquiring lock "refresh_cache-96c3ea0d-8912-4877-b834-3ea3ee904b80" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1340.669743] env[69367]: DEBUG oslo_concurrency.lockutils [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquired lock "refresh_cache-96c3ea0d-8912-4877-b834-3ea3ee904b80" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1340.669934] env[69367]: DEBUG nova.network.neutron [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1340.694914] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance a358ce6d-9826-4ddb-8c2f-51bac8db59d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1340.695100] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 4e346ed1-36e9-421d-975f-e8bb6f05c0a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1340.695231] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance e1c7d100-4ad7-4871-970f-bb7562bfc6fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1340.695353] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1340.695472] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance ab365570-ac29-4094-be4c-d49563a465c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1340.695588] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance c17525ee-d038-4c81-932b-ed74a6de6cb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1340.695703] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 92c27615-d377-492f-a9db-ff45b2e71537 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1340.695815] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1340.695928] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 011ab7de-98a7-41fc-9e05-e71965c73c09 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1340.696051] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance f8c07fa1-d27c-4d0f-847b-481477cd04bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1340.696170] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 7f937d89-684b-44f5-9f30-783aeafe99d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1340.696281] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance ab9d8e3e-65c5-4ac9-920f-3042b8cf2054 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1340.696393] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance c272b0ae-6313-46ab-977c-6de255e77675 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1340.696501] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance d900df05-b65c-4a45-94d1-563afbf9c022 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1340.696608] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 05aae150-5d86-4210-ae7e-8c63e83cb907 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1340.696716] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 42db60d9-e5f7-4925-8f6f-d3884687414a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1340.696837] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 96c3ea0d-8912-4877-b834-3ea3ee904b80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1340.697062] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1340.697204] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3776MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1340.909224] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3733edc0-8580-4863-8e4a-02590c5e2efc {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.919079] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6d1457-adc7-4a43-8a33-357a8abfcde7 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.950375] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4af81a6-3177-415a-83d3-4fcf0ebf4ea9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.958023] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d0acaa-62b5-465c-bfb0-39a8b34f7c9f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.971854] env[69367]: DEBUG nova.compute.provider_tree [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1341.206613] env[69367]: DEBUG nova.network.neutron [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Instance cache missing network info. 
{{(pid=69367) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1341.339524] env[69367]: DEBUG nova.network.neutron [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Updating instance_info_cache with network_info: [{"id": "81716a44-ac7c-4a0a-92b5-533f7e8af4fd", "address": "fa:16:3e:a1:60:dc", "network": {"id": "0b0a82c2-1a70-4174-a75e-5863d3505b2c", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1091682254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26a89ab4163e4b9a801dcbf11c953cf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81716a44-ac", "ovs_interfaceid": "81716a44-ac7c-4a0a-92b5-533f7e8af4fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1341.474889] env[69367]: DEBUG nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1341.842875] env[69367]: DEBUG oslo_concurrency.lockutils [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Releasing lock "refresh_cache-96c3ea0d-8912-4877-b834-3ea3ee904b80" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1341.843294] env[69367]: DEBUG nova.compute.manager [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Instance network_info: |[{"id": "81716a44-ac7c-4a0a-92b5-533f7e8af4fd", "address": "fa:16:3e:a1:60:dc", "network": {"id": "0b0a82c2-1a70-4174-a75e-5863d3505b2c", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1091682254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26a89ab4163e4b9a801dcbf11c953cf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81716a44-ac", "ovs_interfaceid": "81716a44-ac7c-4a0a-92b5-533f7e8af4fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69367) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1341.843804] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:60:dc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92fe29b3-0907-453d-aabb-5559c4bd7c0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '81716a44-ac7c-4a0a-92b5-533f7e8af4fd', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1341.851416] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1341.851625] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1341.851854] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-20575129-5b94-4763-8313-be487ad7e808 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.873050] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1341.873050] env[69367]: value = "task-4234177" [ 1341.873050] env[69367]: _type = "Task" [ 1341.873050] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.881398] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234177, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.980579] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69367) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1341.980970] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.325s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1342.131467] env[69367]: DEBUG nova.compute.manager [req-c3b981ac-866c-493b-b960-dc8976dd1c67 req-96a65636-cea7-4f38-aab9-5b80d3d47e6e service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Received event network-changed-81716a44-ac7c-4a0a-92b5-533f7e8af4fd {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 1342.131675] env[69367]: DEBUG nova.compute.manager [req-c3b981ac-866c-493b-b960-dc8976dd1c67 req-96a65636-cea7-4f38-aab9-5b80d3d47e6e service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Refreshing instance network info cache due to event network-changed-81716a44-ac7c-4a0a-92b5-533f7e8af4fd. {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 1342.131943] env[69367]: DEBUG oslo_concurrency.lockutils [req-c3b981ac-866c-493b-b960-dc8976dd1c67 req-96a65636-cea7-4f38-aab9-5b80d3d47e6e service nova] Acquiring lock "refresh_cache-96c3ea0d-8912-4877-b834-3ea3ee904b80" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1342.132131] env[69367]: DEBUG oslo_concurrency.lockutils [req-c3b981ac-866c-493b-b960-dc8976dd1c67 req-96a65636-cea7-4f38-aab9-5b80d3d47e6e service nova] Acquired lock "refresh_cache-96c3ea0d-8912-4877-b834-3ea3ee904b80" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1342.132266] env[69367]: DEBUG nova.network.neutron [req-c3b981ac-866c-493b-b960-dc8976dd1c67 req-96a65636-cea7-4f38-aab9-5b80d3d47e6e service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Refreshing network info cache for port 81716a44-ac7c-4a0a-92b5-533f7e8af4fd {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1342.382948] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234177, 'name': CreateVM_Task, 'duration_secs': 0.314255} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.383214] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1342.390373] env[69367]: DEBUG oslo_concurrency.lockutils [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1342.390582] env[69367]: DEBUG oslo_concurrency.lockutils [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1342.390925] env[69367]: DEBUG oslo_concurrency.lockutils [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1342.391196] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8141734a-238a-4992-afbb-81b12f1c39ec {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.396340] env[69367]: DEBUG oslo_vmware.api [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 1342.396340] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]5288d589-27e1-17ca-979b-c3ce12777640" [ 1342.396340] env[69367]: _type = "Task" [ 1342.396340] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.404923] env[69367]: DEBUG oslo_vmware.api [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5288d589-27e1-17ca-979b-c3ce12777640, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.809285] env[69367]: DEBUG nova.network.neutron [req-c3b981ac-866c-493b-b960-dc8976dd1c67 req-96a65636-cea7-4f38-aab9-5b80d3d47e6e service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Updated VIF entry in instance network info cache for port 81716a44-ac7c-4a0a-92b5-533f7e8af4fd. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1342.809679] env[69367]: DEBUG nova.network.neutron [req-c3b981ac-866c-493b-b960-dc8976dd1c67 req-96a65636-cea7-4f38-aab9-5b80d3d47e6e service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Updating instance_info_cache with network_info: [{"id": "81716a44-ac7c-4a0a-92b5-533f7e8af4fd", "address": "fa:16:3e:a1:60:dc", "network": {"id": "0b0a82c2-1a70-4174-a75e-5863d3505b2c", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1091682254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26a89ab4163e4b9a801dcbf11c953cf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81716a44-ac", "ovs_interfaceid": "81716a44-ac7c-4a0a-92b5-533f7e8af4fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1342.906799] env[69367]: DEBUG oslo_vmware.api [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]5288d589-27e1-17ca-979b-c3ce12777640, 'name': SearchDatastore_Task, 'duration_secs': 0.010333} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.907134] env[69367]: DEBUG oslo_concurrency.lockutils [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1342.907376] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Processing image 2b099420-9152-4d93-9609-4c9317824c11 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1342.907624] env[69367]: DEBUG oslo_concurrency.lockutils [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1342.907769] env[69367]: DEBUG oslo_concurrency.lockutils [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1342.907949] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1342.908221] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f74c0684-116b-4d99-969e-4fcfdae10d63 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.916266] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1342.916402] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1342.917140] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e50be9f4-8f4b-4775-b252-a93715a240a3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.922406] env[69367]: DEBUG oslo_vmware.api [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 1342.922406] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]527ea7a0-e002-bad2-29a5-6575baf29522" [ 1342.922406] env[69367]: _type = "Task" [ 1342.922406] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.930168] env[69367]: DEBUG oslo_vmware.api [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]527ea7a0-e002-bad2-29a5-6575baf29522, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.312925] env[69367]: DEBUG oslo_concurrency.lockutils [req-c3b981ac-866c-493b-b960-dc8976dd1c67 req-96a65636-cea7-4f38-aab9-5b80d3d47e6e service nova] Releasing lock "refresh_cache-96c3ea0d-8912-4877-b834-3ea3ee904b80" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1343.433305] env[69367]: DEBUG oslo_vmware.api [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]527ea7a0-e002-bad2-29a5-6575baf29522, 'name': SearchDatastore_Task, 'duration_secs': 0.008743} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.434128] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6545b52-b8c7-4497-bc36-b1efed536d98 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.439436] env[69367]: DEBUG oslo_vmware.api [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 1343.439436] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52ad7862-36d0-7330-43b9-e85a2c04e3d2" [ 1343.439436] env[69367]: _type = "Task" [ 1343.439436] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.447230] env[69367]: DEBUG oslo_vmware.api [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52ad7862-36d0-7330-43b9-e85a2c04e3d2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.950128] env[69367]: DEBUG oslo_vmware.api [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]52ad7862-36d0-7330-43b9-e85a2c04e3d2, 'name': SearchDatastore_Task, 'duration_secs': 0.009177} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.950365] env[69367]: DEBUG oslo_concurrency.lockutils [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1343.950642] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 96c3ea0d-8912-4877-b834-3ea3ee904b80/96c3ea0d-8912-4877-b834-3ea3ee904b80.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1343.950906] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7a749597-3685-4bf5-887d-5e798fd0ab12 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.957524] env[69367]: DEBUG oslo_vmware.api [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 1343.957524] env[69367]: value = "task-4234178" [ 1343.957524] env[69367]: _type = "Task" [ 1343.957524] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.965761] env[69367]: DEBUG oslo_vmware.api [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234178, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.467025] env[69367]: DEBUG oslo_vmware.api [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234178, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.447044} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.467363] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2b099420-9152-4d93-9609-4c9317824c11/2b099420-9152-4d93-9609-4c9317824c11.vmdk to [datastore2] 96c3ea0d-8912-4877-b834-3ea3ee904b80/96c3ea0d-8912-4877-b834-3ea3ee904b80.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1344.467566] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Extending root virtual disk to 1048576 {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1344.467780] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f05ebf92-0f91-44f3-8691-99530391b45d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.474555] env[69367]: DEBUG oslo_vmware.api [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 1344.474555] env[69367]: value = "task-4234179" [ 1344.474555] env[69367]: _type = "Task" [ 1344.474555] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.481968] env[69367]: DEBUG oslo_vmware.api [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234179, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.980680] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1344.980877] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69367) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11183}} [ 1344.984151] env[69367]: DEBUG oslo_vmware.api [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234179, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063427} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.984404] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Extended root virtual disk {{(pid=69367) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1344.985168] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef6b554-221e-46c4-bfa3-87976cc3fa1c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.008693] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] 96c3ea0d-8912-4877-b834-3ea3ee904b80/96c3ea0d-8912-4877-b834-3ea3ee904b80.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1345.008982] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-658f04f4-04f3-42b5-8b4d-31e262d4de63 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.029881] env[69367]: DEBUG oslo_vmware.api [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 1345.029881] env[69367]: value = "task-4234180" [ 1345.029881] env[69367]: _type = "Task" [ 1345.029881] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.038161] env[69367]: DEBUG oslo_vmware.api [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234180, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.539959] env[69367]: DEBUG oslo_vmware.api [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234180, 'name': ReconfigVM_Task, 'duration_secs': 0.26322} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.540365] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Reconfigured VM instance instance-0000006e to attach disk [datastore2] 96c3ea0d-8912-4877-b834-3ea3ee904b80/96c3ea0d-8912-4877-b834-3ea3ee904b80.vmdk or device None with type sparse {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1345.540929] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-83a1322e-d6d6-42db-bc8b-8316150e76dc {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.547182] env[69367]: DEBUG oslo_vmware.api [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 1345.547182] env[69367]: value = "task-4234181" [ 1345.547182] env[69367]: _type = "Task" [ 1345.547182] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.555281] env[69367]: DEBUG oslo_vmware.api [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234181, 'name': Rename_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.057627] env[69367]: DEBUG oslo_vmware.api [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234181, 'name': Rename_Task, 'duration_secs': 0.133878} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.057897] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1346.058167] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a24ac3c2-e183-4609-be0b-9a2eda1af10f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.064403] env[69367]: DEBUG oslo_vmware.api [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 1346.064403] env[69367]: value = "task-4234182" [ 1346.064403] env[69367]: _type = "Task" [ 1346.064403] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.072669] env[69367]: DEBUG oslo_vmware.api [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234182, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.576067] env[69367]: DEBUG oslo_vmware.api [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234182, 'name': PowerOnVM_Task, 'duration_secs': 0.460202} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.576479] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1346.576792] env[69367]: INFO nova.compute.manager [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Took 6.72 seconds to spawn the instance on the hypervisor. [ 1346.577916] env[69367]: DEBUG nova.compute.manager [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1346.577916] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c8927d-0c93-400f-a8b0-1a39225d2a75 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.097817] env[69367]: INFO nova.compute.manager [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Took 11.59 seconds to build instance. [ 1347.542775] env[69367]: DEBUG nova.compute.manager [req-486371c8-8407-446a-bfad-dffb1f229060 req-60ec9254-249f-4080-b44b-d9e81cb03605 service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Received event network-changed-81716a44-ac7c-4a0a-92b5-533f7e8af4fd {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 1347.542890] env[69367]: DEBUG nova.compute.manager [req-486371c8-8407-446a-bfad-dffb1f229060 req-60ec9254-249f-4080-b44b-d9e81cb03605 service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Refreshing instance network info cache due to event network-changed-81716a44-ac7c-4a0a-92b5-533f7e8af4fd. 
{{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 1347.543031] env[69367]: DEBUG oslo_concurrency.lockutils [req-486371c8-8407-446a-bfad-dffb1f229060 req-60ec9254-249f-4080-b44b-d9e81cb03605 service nova] Acquiring lock "refresh_cache-96c3ea0d-8912-4877-b834-3ea3ee904b80" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1347.543285] env[69367]: DEBUG oslo_concurrency.lockutils [req-486371c8-8407-446a-bfad-dffb1f229060 req-60ec9254-249f-4080-b44b-d9e81cb03605 service nova] Acquired lock "refresh_cache-96c3ea0d-8912-4877-b834-3ea3ee904b80" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1347.543465] env[69367]: DEBUG nova.network.neutron [req-486371c8-8407-446a-bfad-dffb1f229060 req-60ec9254-249f-4080-b44b-d9e81cb03605 service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Refreshing network info cache for port 81716a44-ac7c-4a0a-92b5-533f7e8af4fd {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1347.600203] env[69367]: DEBUG oslo_concurrency.lockutils [None req-71920fad-e386-45af-b24b-3144d4cdd21a tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "96c3ea0d-8912-4877-b834-3ea3ee904b80" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.099s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1348.245845] env[69367]: DEBUG nova.network.neutron [req-486371c8-8407-446a-bfad-dffb1f229060 req-60ec9254-249f-4080-b44b-d9e81cb03605 service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Updated VIF entry in instance network info cache for port 81716a44-ac7c-4a0a-92b5-533f7e8af4fd. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1348.246262] env[69367]: DEBUG nova.network.neutron [req-486371c8-8407-446a-bfad-dffb1f229060 req-60ec9254-249f-4080-b44b-d9e81cb03605 service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Updating instance_info_cache with network_info: [{"id": "81716a44-ac7c-4a0a-92b5-533f7e8af4fd", "address": "fa:16:3e:a1:60:dc", "network": {"id": "0b0a82c2-1a70-4174-a75e-5863d3505b2c", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1091682254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26a89ab4163e4b9a801dcbf11c953cf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81716a44-ac", "ovs_interfaceid": "81716a44-ac7c-4a0a-92b5-533f7e8af4fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1348.749094] env[69367]: DEBUG oslo_concurrency.lockutils [req-486371c8-8407-446a-bfad-dffb1f229060 req-60ec9254-249f-4080-b44b-d9e81cb03605 service nova] Releasing lock "refresh_cache-96c3ea0d-8912-4877-b834-3ea3ee904b80" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1387.138093] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquiring lock "96c3ea0d-8912-4877-b834-3ea3ee904b80" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1387.138470] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "96c3ea0d-8912-4877-b834-3ea3ee904b80" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1387.138725] env[69367]: INFO nova.compute.manager [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Shelving [ 1388.149077] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Powering off the VM {{(pid=69367) power_off_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1388.149077] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7fb8d367-335c-499b-9ac7-d5bb7d1c04eb {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.160937] env[69367]: DEBUG oslo_vmware.api [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 1388.160937] env[69367]: value = "task-4234183" [ 1388.160937] env[69367]: _type = "Task" [ 1388.160937] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.178183] env[69367]: DEBUG oslo_vmware.api [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234183, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.674364] env[69367]: DEBUG oslo_vmware.api [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234183, 'name': PowerOffVM_Task, 'duration_secs': 0.206928} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.674753] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1388.675919] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b150ffe3-9a85-4b9e-b4ac-a7342b76b577 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.697286] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a928eb7e-3a94-4d46-8b72-1db4370d2d65 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.211117] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Creating Snapshot of the VM instance {{(pid=69367) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1389.211117] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-64489cf1-1a0b-4e9d-ad8e-826daf8b8df2 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.218196] env[69367]: DEBUG oslo_vmware.api [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 1389.218196] env[69367]: value = "task-4234184" [ 1389.218196] env[69367]: _type = "Task" [ 1389.218196] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.227110] env[69367]: DEBUG oslo_vmware.api [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234184, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.730476] env[69367]: DEBUG oslo_vmware.api [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234184, 'name': CreateSnapshot_Task, 'duration_secs': 0.430867} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.730749] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Created Snapshot of the VM instance {{(pid=69367) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1389.731506] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bfe6cde-7635-4a5b-b264-7d260d7b981c {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.249251] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Creating linked-clone VM from snapshot {{(pid=69367) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1390.249719] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2be686e8-9cec-4f4c-9133-595b37f9e67b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.259514] env[69367]: DEBUG oslo_vmware.api [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 1390.259514] env[69367]: value = "task-4234185" [ 1390.259514] env[69367]: _type = "Task" [ 1390.259514] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.267868] env[69367]: DEBUG oslo_vmware.api [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234185, 'name': CloneVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.770426] env[69367]: DEBUG oslo_vmware.api [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234185, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.271459] env[69367]: DEBUG oslo_vmware.api [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234185, 'name': CloneVM_Task, 'duration_secs': 0.934659} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.271923] env[69367]: INFO nova.virt.vmwareapi.vmops [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Created linked-clone VM from snapshot [ 1391.272521] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f270f22-b035-4a95-b431-360ab6a33544 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.281201] env[69367]: DEBUG nova.virt.vmwareapi.images [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Uploading image 33ffe0b0-3ccc-4728-af07-217d2bfd5753 {{(pid=69367) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1391.304120] env[69367]: DEBUG oslo_vmware.rw_handles [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1391.304120] env[69367]: value = "vm-837763" [ 1391.304120] env[69367]: _type = "VirtualMachine" [ 1391.304120] env[69367]: }. {{(pid=69367) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1391.304432] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-aefa61f9-6536-41fe-8e12-d5c90fbbd80e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.312509] env[69367]: DEBUG oslo_vmware.rw_handles [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lease: (returnval){ [ 1391.312509] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52d82957-8344-92df-ac47-e2a664cf1e89" [ 1391.312509] env[69367]: _type = "HttpNfcLease" [ 1391.312509] env[69367]: } obtained for exporting VM: (result){ [ 1391.312509] env[69367]: value = "vm-837763" [ 1391.312509] env[69367]: _type = "VirtualMachine" [ 1391.312509] env[69367]: }. {{(pid=69367) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1391.312905] env[69367]: DEBUG oslo_vmware.api [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the lease: (returnval){ [ 1391.312905] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52d82957-8344-92df-ac47-e2a664cf1e89" [ 1391.312905] env[69367]: _type = "HttpNfcLease" [ 1391.312905] env[69367]: } to be ready. 
{{(pid=69367) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1391.321086] env[69367]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1391.321086] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52d82957-8344-92df-ac47-e2a664cf1e89" [ 1391.321086] env[69367]: _type = "HttpNfcLease" [ 1391.321086] env[69367]: } is initializing. {{(pid=69367) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1391.822078] env[69367]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1391.822078] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52d82957-8344-92df-ac47-e2a664cf1e89" [ 1391.822078] env[69367]: _type = "HttpNfcLease" [ 1391.822078] env[69367]: } is ready. {{(pid=69367) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1391.822416] env[69367]: DEBUG oslo_vmware.rw_handles [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1391.822416] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52d82957-8344-92df-ac47-e2a664cf1e89" [ 1391.822416] env[69367]: _type = "HttpNfcLease" [ 1391.822416] env[69367]: }. {{(pid=69367) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1391.823116] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce806074-ac01-452d-a06f-0bd8b04d6d32 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.830438] env[69367]: DEBUG oslo_vmware.rw_handles [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52491696-3d33-174e-e683-be1359cba59d/disk-0.vmdk from lease info. {{(pid=69367) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1391.830639] env[69367]: DEBUG oslo_vmware.rw_handles [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52491696-3d33-174e-e683-be1359cba59d/disk-0.vmdk for reading. 
{{(pid=69367) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1391.915715] env[69367]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c5d6fd82-9ba1-45df-9599-391f50964fb6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.088168] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1395.086702] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1396.086387] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1397.082308] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1397.086043] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1399.082171] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1399.590695] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1399.590930] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1400.069259] env[69367]: DEBUG oslo_vmware.rw_handles [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52491696-3d33-174e-e683-be1359cba59d/disk-0.vmdk. 
{{(pid=69367) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1400.070440] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3ef7e87-3f7d-4b64-8a8d-5d92eead58ad {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.077304] env[69367]: DEBUG oslo_vmware.rw_handles [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52491696-3d33-174e-e683-be1359cba59d/disk-0.vmdk is in state: ready. {{(pid=69367) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1400.077478] env[69367]: ERROR oslo_vmware.rw_handles [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52491696-3d33-174e-e683-be1359cba59d/disk-0.vmdk due to incomplete transfer. [ 1400.077713] env[69367]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-04745288-2e8b-4249-aa47-bd74132d31d1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.086238] env[69367]: DEBUG oslo_vmware.rw_handles [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52491696-3d33-174e-e683-be1359cba59d/disk-0.vmdk. 
{{(pid=69367) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1400.086562] env[69367]: DEBUG nova.virt.vmwareapi.images [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Uploaded image 33ffe0b0-3ccc-4728-af07-217d2bfd5753 to the Glance image server {{(pid=69367) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1400.088624] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Destroying the VM {{(pid=69367) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1400.088882] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-57c454a2-bff4-4ee2-a6d4-28b9d2df7eba {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.093577] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1400.093777] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1400.093943] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1400.094108] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69367) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1400.095993] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b0a9e73-badc-440a-b2d2-033f0a0fb886 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.098751] env[69367]: DEBUG oslo_vmware.api [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 1400.098751] env[69367]: value = "task-4234187" [ 1400.098751] env[69367]: _type = "Task" [ 1400.098751] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.107456] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a522b49-2a1e-42dd-b8ae-ef68cd68f433 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.116393] env[69367]: DEBUG oslo_vmware.api [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234187, 'name': Destroy_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.127428] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d806d9d6-2562-4512-a15f-e74608a969ca {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.134617] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa67ba0-afa8-4c6c-9bb8-dbf9ad55ae92 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.164682] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180348MB free_disk=0GB free_vcpus=48 pci_devices=None {{(pid=69367) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1400.164857] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1400.165046] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1400.610382] env[69367]: DEBUG oslo_vmware.api [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234187, 'name': Destroy_Task, 'duration_secs': 0.384647} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.610660] env[69367]: INFO nova.virt.vmwareapi.vm_util [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Destroyed the VM [ 1400.610939] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Deleting Snapshot of the VM instance {{(pid=69367) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1400.611214] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5869d1a6-1238-4647-9d1a-caf3c83d3f38 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.618187] env[69367]: DEBUG oslo_vmware.api [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 1400.618187] env[69367]: value = "task-4234188" [ 1400.618187] env[69367]: _type = "Task" [ 1400.618187] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.626459] env[69367]: DEBUG oslo_vmware.api [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234188, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.128729] env[69367]: DEBUG oslo_vmware.api [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234188, 'name': RemoveSnapshot_Task, 'duration_secs': 0.358071} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.129080] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Deleted Snapshot of the VM instance {{(pid=69367) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1401.129312] env[69367]: DEBUG nova.compute.manager [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1401.130144] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7464c6-138a-42db-adc0-ba4459fd103b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.202885] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance a358ce6d-9826-4ddb-8c2f-51bac8db59d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1401.203066] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 4e346ed1-36e9-421d-975f-e8bb6f05c0a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1401.203197] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance e1c7d100-4ad7-4871-970f-bb7562bfc6fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1401.203321] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1401.203450] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance ab365570-ac29-4094-be4c-d49563a465c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1401.203568] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance c17525ee-d038-4c81-932b-ed74a6de6cb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1401.203682] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 92c27615-d377-492f-a9db-ff45b2e71537 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1401.203796] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1401.203911] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 011ab7de-98a7-41fc-9e05-e71965c73c09 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1401.204033] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance f8c07fa1-d27c-4d0f-847b-481477cd04bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1401.204154] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 7f937d89-684b-44f5-9f30-783aeafe99d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1401.204266] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance ab9d8e3e-65c5-4ac9-920f-3042b8cf2054 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1401.204377] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance c272b0ae-6313-46ab-977c-6de255e77675 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1401.204530] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance d900df05-b65c-4a45-94d1-563afbf9c022 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1401.204588] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 05aae150-5d86-4210-ae7e-8c63e83cb907 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1401.204693] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 42db60d9-e5f7-4925-8f6f-d3884687414a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1401.204839] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 96c3ea0d-8912-4877-b834-3ea3ee904b80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1401.205097] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1401.205245] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3776MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1401.407847] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17716e1d-fcae-409b-9e06-1febb62b1f67 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.418316] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-941ea2ad-c6e6-4d0a-82f0-30cadc4e3362 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.450299] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f852a1f3-7f50-450b-ac8f-34e0e2370796 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.458142] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce99e39b-7f96-4432-8957-4d3fa1e9962f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.471826] env[69367]: DEBUG nova.compute.provider_tree [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1401.642701] env[69367]: INFO nova.compute.manager [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Shelve offloading [ 1401.993135] env[69367]: ERROR nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] [req-0490550a-543a-49db-b865-ee8bfb7ba692] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-0490550a-543a-49db-b865-ee8bfb7ba692"}]} [ 1401.993528] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.828s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1401.994158] env[69367]: ERROR nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Error updating resources for node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28.: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 1401.994158] env[69367]: ERROR nova.compute.manager Traceback (most recent call last): [ 1401.994158] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 1401.994158] env[69367]: ERROR nova.compute.manager yield [ 1401.994158] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 1401.994158] env[69367]: ERROR nova.compute.manager self.set_inventory_for_provider( [ 1401.994158] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 1401.994158] env[69367]: ERROR nova.compute.manager raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 1401.994158] env[69367]: ERROR nova.compute.manager nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-0490550a-543a-49db-b865-ee8bfb7ba692"}]} [ 1401.994158] env[69367]: ERROR nova.compute.manager [ 1401.994158] env[69367]: ERROR nova.compute.manager During handling of the above exception, another exception occurred: [ 1401.994158] env[69367]: ERROR nova.compute.manager [ 1401.994158] env[69367]: ERROR nova.compute.manager Traceback (most recent call last): [ 1401.994158] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 11219, in _update_available_resource_for_node [ 1401.994158] env[69367]: ERROR nova.compute.manager self.rt.update_available_resource(context, nodename, [ 1401.994158] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 965, in update_available_resource [ 1401.994158] env[69367]: ERROR nova.compute.manager self._update_available_resource(context, resources, startup=startup) [ 1401.994158] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 1401.994158] env[69367]: ERROR nova.compute.manager return f(*args, **kwargs) [ 1401.994158] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1096, in _update_available_resource [ 1401.994158] env[69367]: ERROR nova.compute.manager self._update(context, cn, startup=startup) [ 1401.994158] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 1401.994158] env[69367]: ERROR nova.compute.manager self._update_to_placement(context, compute_node, startup) [ 1401.994158] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 1401.994158] env[69367]: ERROR nova.compute.manager return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 1401.994158] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 
1401.994158] env[69367]: ERROR nova.compute.manager return attempt.get(self._wrap_exception) [ 1401.994158] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 1401.994158] env[69367]: ERROR nova.compute.manager six.reraise(self.value[0], self.value[1], self.value[2]) [ 1401.994158] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 1401.994158] env[69367]: ERROR nova.compute.manager raise value [ 1401.994158] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 1401.994158] env[69367]: ERROR nova.compute.manager attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 1401.994158] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 1401.994158] env[69367]: ERROR nova.compute.manager self.reportclient.update_from_provider_tree( [ 1401.994158] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 1401.994158] env[69367]: ERROR nova.compute.manager with catch_all(pd.uuid): [ 1401.994158] env[69367]: ERROR nova.compute.manager File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 1401.994158] env[69367]: ERROR nova.compute.manager self.gen.throw(typ, value, traceback) [ 1401.994158] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 1401.994158] env[69367]: ERROR nova.compute.manager raise exception.ResourceProviderSyncFailed() [ 1401.994158] env[69367]: ERROR nova.compute.manager nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 1401.994158] env[69367]: ERROR nova.compute.manager [ 1402.146405] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1402.146780] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-08ff5023-48cd-4652-8afa-9ba8e59e94f6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.154062] env[69367]: DEBUG oslo_vmware.api [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 1402.154062] env[69367]: value = "task-4234189" [ 1402.154062] env[69367]: _type = "Task" [ 1402.154062] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.163448] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] VM already powered off {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1402.163667] env[69367]: DEBUG nova.compute.manager [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1402.164451] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c7a74ce-4db1-4ef8-8f5b-f9292d2009df {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.170506] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquiring lock "refresh_cache-96c3ea0d-8912-4877-b834-3ea3ee904b80" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1402.170666] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquired lock "refresh_cache-96c3ea0d-8912-4877-b834-3ea3ee904b80" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1402.170877] env[69367]: DEBUG nova.network.neutron [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1402.875941] env[69367]: DEBUG nova.network.neutron [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Updating instance_info_cache with network_info: [{"id": "81716a44-ac7c-4a0a-92b5-533f7e8af4fd", "address": "fa:16:3e:a1:60:dc", "network": {"id": "0b0a82c2-1a70-4174-a75e-5863d3505b2c", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1091682254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26a89ab4163e4b9a801dcbf11c953cf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap81716a44-ac", "ovs_interfaceid": "81716a44-ac7c-4a0a-92b5-533f7e8af4fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1403.378973] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Releasing lock "refresh_cache-96c3ea0d-8912-4877-b834-3ea3ee904b80" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1403.620890] env[69367]: DEBUG nova.compute.manager [req-a0212cbe-7c7c-484a-9275-1fa10af90830 req-ea36fad5-85f4-4d6f-afd5-64242116a016 service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Received event network-vif-unplugged-81716a44-ac7c-4a0a-92b5-533f7e8af4fd {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 1403.621040] env[69367]: DEBUG oslo_concurrency.lockutils [req-a0212cbe-7c7c-484a-9275-1fa10af90830 req-ea36fad5-85f4-4d6f-afd5-64242116a016 service nova] Acquiring lock "96c3ea0d-8912-4877-b834-3ea3ee904b80-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1403.621274] env[69367]: DEBUG oslo_concurrency.lockutils [req-a0212cbe-7c7c-484a-9275-1fa10af90830 req-ea36fad5-85f4-4d6f-afd5-64242116a016 service nova] Lock "96c3ea0d-8912-4877-b834-3ea3ee904b80-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1403.621452] env[69367]: DEBUG oslo_concurrency.lockutils [req-a0212cbe-7c7c-484a-9275-1fa10af90830 req-ea36fad5-85f4-4d6f-afd5-64242116a016 service nova] Lock "96c3ea0d-8912-4877-b834-3ea3ee904b80-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1403.621630] env[69367]: DEBUG nova.compute.manager [req-a0212cbe-7c7c-484a-9275-1fa10af90830 req-ea36fad5-85f4-4d6f-afd5-64242116a016 service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] No waiting events found dispatching network-vif-unplugged-81716a44-ac7c-4a0a-92b5-533f7e8af4fd {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1403.621805] env[69367]: WARNING nova.compute.manager [req-a0212cbe-7c7c-484a-9275-1fa10af90830 req-ea36fad5-85f4-4d6f-afd5-64242116a016 service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Received unexpected event network-vif-unplugged-81716a44-ac7c-4a0a-92b5-533f7e8af4fd for instance with vm_state shelved and task_state shelving_offloading. 
[ 1403.719969] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1403.720934] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10912251-1a50-416d-a208-666dd5930dff {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.729069] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1403.729319] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9e479db7-2095-443b-9d44-a347f945e353 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.798629] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1403.798973] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1403.799246] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Deleting the datastore file [datastore2] 96c3ea0d-8912-4877-b834-3ea3ee904b80 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1403.799562] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9cdf8fbc-84b1-456e-ad1c-e46e26abdce6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.807244] env[69367]: DEBUG oslo_vmware.api [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 1403.807244] env[69367]: value = "task-4234191" [ 1403.807244] env[69367]: _type = "Task" [ 1403.807244] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.815622] env[69367]: DEBUG oslo_vmware.api [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234191, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.317736] env[69367]: DEBUG oslo_vmware.api [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234191, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127325} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.318038] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1404.318238] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1404.318418] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1404.341505] env[69367]: INFO nova.scheduler.client.report [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Deleted allocations for instance 96c3ea0d-8912-4877-b834-3ea3ee904b80 [ 1404.847519] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1404.847876] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1404.848145] env[69367]: DEBUG nova.objects.instance [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lazy-loading 'resources' on Instance uuid 96c3ea0d-8912-4877-b834-3ea3ee904b80 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1405.350945] env[69367]: DEBUG nova.objects.instance [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lazy-loading 'numa_topology' on Instance uuid 96c3ea0d-8912-4877-b834-3ea3ee904b80 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1405.651048] env[69367]: DEBUG nova.compute.manager [req-81ae6aed-1eaf-4f48-9e96-143994680277 
req-6b6c3df3-3c8e-4cda-a890-7094663efadd service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Received event network-changed-81716a44-ac7c-4a0a-92b5-533f7e8af4fd {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 1405.651265] env[69367]: DEBUG nova.compute.manager [req-81ae6aed-1eaf-4f48-9e96-143994680277 req-6b6c3df3-3c8e-4cda-a890-7094663efadd service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Refreshing instance network info cache due to event network-changed-81716a44-ac7c-4a0a-92b5-533f7e8af4fd. {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 1405.651488] env[69367]: DEBUG oslo_concurrency.lockutils [req-81ae6aed-1eaf-4f48-9e96-143994680277 req-6b6c3df3-3c8e-4cda-a890-7094663efadd service nova] Acquiring lock "refresh_cache-96c3ea0d-8912-4877-b834-3ea3ee904b80" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1405.651638] env[69367]: DEBUG oslo_concurrency.lockutils [req-81ae6aed-1eaf-4f48-9e96-143994680277 req-6b6c3df3-3c8e-4cda-a890-7094663efadd service nova] Acquired lock "refresh_cache-96c3ea0d-8912-4877-b834-3ea3ee904b80" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1405.651804] env[69367]: DEBUG nova.network.neutron [req-81ae6aed-1eaf-4f48-9e96-143994680277 req-6b6c3df3-3c8e-4cda-a890-7094663efadd service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Refreshing network info cache for port 81716a44-ac7c-4a0a-92b5-533f7e8af4fd {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1405.853849] env[69367]: DEBUG nova.objects.base [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Object Instance<96c3ea0d-8912-4877-b834-3ea3ee904b80> lazy-loaded attributes: resources,numa_topology {{(pid=69367) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1405.869737] env[69367]: DEBUG nova.scheduler.client.report [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1405.882955] env[69367]: DEBUG nova.scheduler.client.report [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1405.883213] env[69367]: DEBUG nova.compute.provider_tree [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1405.894472] env[69367]: DEBUG nova.scheduler.client.report [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1405.894680] env[69367]: DEBUG nova.compute.provider_tree [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Updating resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 generation from 118 to 119 during operation: update_aggregates {{(pid=69367) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1405.910724] env[69367]: DEBUG nova.scheduler.client.report [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1406.104284] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23537f88-397d-45da-80d1-65fb2d63aa1e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.112141] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac71d99c-9844-42f8-9541-93dc43ba4672 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.142913] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68105f2d-a6a6-4a7c-8a1f-7b898033692f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.150846] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb59f9a0-769a-4b43-8a96-f6e1a127edba {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.166353] env[69367]: DEBUG nova.compute.provider_tree [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1406.367723] env[69367]: DEBUG nova.network.neutron [req-81ae6aed-1eaf-4f48-9e96-143994680277 req-6b6c3df3-3c8e-4cda-a890-7094663efadd service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Updated VIF entry in instance network info cache for port 
81716a44-ac7c-4a0a-92b5-533f7e8af4fd. {{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1406.368313] env[69367]: DEBUG nova.network.neutron [req-81ae6aed-1eaf-4f48-9e96-143994680277 req-6b6c3df3-3c8e-4cda-a890-7094663efadd service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Updating instance_info_cache with network_info: [{"id": "81716a44-ac7c-4a0a-92b5-533f7e8af4fd", "address": "fa:16:3e:a1:60:dc", "network": {"id": "0b0a82c2-1a70-4174-a75e-5863d3505b2c", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1091682254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26a89ab4163e4b9a801dcbf11c953cf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap81716a44-ac", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1406.490765] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1406.490962] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69367) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11183}} [ 1406.669560] env[69367]: DEBUG nova.scheduler.client.report [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1406.705057] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquiring lock "96c3ea0d-8912-4877-b834-3ea3ee904b80" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1406.871462] env[69367]: DEBUG oslo_concurrency.lockutils [req-81ae6aed-1eaf-4f48-9e96-143994680277 req-6b6c3df3-3c8e-4cda-a890-7094663efadd service nova] Releasing lock "refresh_cache-96c3ea0d-8912-4877-b834-3ea3ee904b80" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1407.175063] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.327s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1407.682402] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2178f7a7-dbd3-481e-ad77-1f13f7908d87 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "96c3ea0d-8912-4877-b834-3ea3ee904b80" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 20.544s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1407.683240] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "96c3ea0d-8912-4877-b834-3ea3ee904b80" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.978s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1407.683429] env[69367]: INFO nova.compute.manager [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Unshelving [ 1408.709455] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1408.709838] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1408.710106] env[69367]: DEBUG nova.objects.instance [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lazy-loading 'pci_requests' on Instance uuid 96c3ea0d-8912-4877-b834-3ea3ee904b80 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1409.215300] env[69367]: DEBUG nova.objects.instance [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lazy-loading 'numa_topology' on Instance uuid 96c3ea0d-8912-4877-b834-3ea3ee904b80 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1409.718553] env[69367]: INFO nova.compute.claims [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1410.927724] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee7458b-fae4-4aec-89e1-d33248da3b36 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.936116] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d5bd207-9785-4169-b1cc-7226ce19a04d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.966551] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5439137d-2a0b-4328-9170-04a0638db267 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.974237] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2ff0dd8-47f4-41dd-b200-d561710abd73 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.987428] env[69367]: DEBUG nova.compute.provider_tree [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1411.491139] env[69367]: DEBUG nova.scheduler.client.report [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1411.996121] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.286s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1412.045095] env[69367]: INFO nova.network.neutron [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Updating port 81716a44-ac7c-4a0a-92b5-533f7e8af4fd with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1413.481238] env[69367]: DEBUG nova.compute.manager [req-9e4de507-2d11-4f60-b0ea-7d16e7260b7c req-4a0da024-ff8a-4944-81b3-0d3669f09578 service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Received event network-vif-plugged-81716a44-ac7c-4a0a-92b5-533f7e8af4fd {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 1413.481522] env[69367]: DEBUG oslo_concurrency.lockutils [req-9e4de507-2d11-4f60-b0ea-7d16e7260b7c req-4a0da024-ff8a-4944-81b3-0d3669f09578 service nova] Acquiring lock "96c3ea0d-8912-4877-b834-3ea3ee904b80-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1413.481661] env[69367]: DEBUG oslo_concurrency.lockutils [req-9e4de507-2d11-4f60-b0ea-7d16e7260b7c req-4a0da024-ff8a-4944-81b3-0d3669f09578 service nova] Lock "96c3ea0d-8912-4877-b834-3ea3ee904b80-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1413.481834] env[69367]: DEBUG oslo_concurrency.lockutils [req-9e4de507-2d11-4f60-b0ea-7d16e7260b7c req-4a0da024-ff8a-4944-81b3-0d3669f09578 service nova] Lock "96c3ea0d-8912-4877-b834-3ea3ee904b80-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1413.482010] env[69367]: DEBUG nova.compute.manager [req-9e4de507-2d11-4f60-b0ea-7d16e7260b7c req-4a0da024-ff8a-4944-81b3-0d3669f09578 service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] No waiting events found dispatching network-vif-plugged-81716a44-ac7c-4a0a-92b5-533f7e8af4fd {{(pid=69367) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1413.482191] env[69367]: WARNING nova.compute.manager [req-9e4de507-2d11-4f60-b0ea-7d16e7260b7c req-4a0da024-ff8a-4944-81b3-0d3669f09578 service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Received unexpected event network-vif-plugged-81716a44-ac7c-4a0a-92b5-533f7e8af4fd for instance with vm_state shelved_offloaded and task_state spawning. 
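Annotation: at this point the unshelve claim has succeeded, the inventory refreshed from Placement shows DISK_GB max_unit back at 1, and port 81716a44-ac7c-4a0a-92b5-533f7e8af4fd is being rebound to host cpu-1. When debugging a sync failure like the one earlier in this log, it can help to read the provider's stored inventory directly from the Placement API. A rough sketch using the requests library follows; the endpoint path matches the URL in the traceback above, while the placement URL, token, and microversion header are placeholders you would supply from your own environment.

# Rough sketch: fetch the stored inventory for the resource provider named
# in this log directly from the Placement API. PLACEMENT_URL and TOKEN are
# placeholders (assumptions), not values from the log.
import requests

PLACEMENT_URL = "http://placement.example.test"          # placeholder
TOKEN = "gAAAA..."                                        # placeholder keystone token
PROVIDER_UUID = "19ddf8be-7305-4f70-8366-52a9957232e6"    # from the log above

resp = requests.get(
    f"{PLACEMENT_URL}/resource_providers/{PROVIDER_UUID}/inventories",
    headers={
        "X-Auth-Token": TOKEN,
        # Any reasonably recent placement microversion is fine for reads.
        "OpenStack-API-Version": "placement 1.39",
    },
    timeout=10,
)
resp.raise_for_status()

for resource_class, inventory in resp.json()["inventories"].items():
    # e.g. DISK_GB {'total': 400, ..., 'max_unit': 1, ...} once the
    # provider has been refreshed, as shown later in this log.
    print(resource_class, inventory)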
[ 1413.562419] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquiring lock "refresh_cache-96c3ea0d-8912-4877-b834-3ea3ee904b80" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1413.562419] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquired lock "refresh_cache-96c3ea0d-8912-4877-b834-3ea3ee904b80" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1413.562419] env[69367]: DEBUG nova.network.neutron [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Building network info cache for instance {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1414.271681] env[69367]: DEBUG nova.network.neutron [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Updating instance_info_cache with network_info: [{"id": "81716a44-ac7c-4a0a-92b5-533f7e8af4fd", "address": "fa:16:3e:a1:60:dc", "network": {"id": "0b0a82c2-1a70-4174-a75e-5863d3505b2c", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1091682254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26a89ab4163e4b9a801dcbf11c953cf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81716a44-ac", "ovs_interfaceid": "81716a44-ac7c-4a0a-92b5-533f7e8af4fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1414.775239] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Releasing lock "refresh_cache-96c3ea0d-8912-4877-b834-3ea3ee904b80" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1414.802127] env[69367]: DEBUG nova.virt.hardware [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-05-19T12:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='e1d99e2f80695beda2a5a142ab88bd31',container_format='bare',created_at=2025-05-19T13:04:50Z,direct_url=,disk_format='vmdk',id=33ffe0b0-3ccc-4728-af07-217d2bfd5753,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-2124382721-shelved',owner='26a89ab4163e4b9a801dcbf11c953cf3',properties=ImageMetaProps,protected=,size=31669760,status='active',tags=,updated_at=2025-05-19T13:05:03Z,virtual_size=,visibility=), allow threads: False {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1414.802413] env[69367]: DEBUG nova.virt.hardware [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Flavor limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1414.802577] env[69367]: DEBUG nova.virt.hardware [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Image limits 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1414.802762] env[69367]: DEBUG nova.virt.hardware [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Flavor pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1414.802931] env[69367]: DEBUG nova.virt.hardware [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Image pref 0:0:0 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1414.803076] env[69367]: DEBUG nova.virt.hardware [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69367) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1414.803289] env[69367]: DEBUG nova.virt.hardware [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1414.803452] env[69367]: DEBUG nova.virt.hardware [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69367) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1414.803622] env[69367]: DEBUG nova.virt.hardware [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Got 1 possible topologies {{(pid=69367) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:505}} [ 1414.803786] env[69367]: DEBUG nova.virt.hardware [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1414.803962] env[69367]: DEBUG nova.virt.hardware [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69367) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1414.805345] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8dd8c40-822b-432e-907c-7e0e46e97fc1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.814262] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e79910bc-8d9c-4b55-a7e8-0047a76e84bf {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.827526] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:60:dc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92fe29b3-0907-453d-aabb-5559c4bd7c0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '81716a44-ac7c-4a0a-92b5-533f7e8af4fd', 'vif_model': 'vmxnet3'}] {{(pid=69367) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1414.834712] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1414.834945] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Creating VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1414.835181] env[69367]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8910d3cd-c7f4-409e-a6cd-c13ef4b0cf27 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.853798] env[69367]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1414.853798] env[69367]: value = "task-4234192" [ 1414.853798] env[69367]: _type = "Task" [ 1414.853798] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.861351] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234192, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.364204] env[69367]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234192, 'name': CreateVM_Task, 'duration_secs': 0.299406} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.364434] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Created VM on the ESX host {{(pid=69367) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1415.364988] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/33ffe0b0-3ccc-4728-af07-217d2bfd5753" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1415.365207] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquired lock "[datastore2] devstack-image-cache_base/33ffe0b0-3ccc-4728-af07-217d2bfd5753" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1415.365674] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/33ffe0b0-3ccc-4728-af07-217d2bfd5753" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1415.365948] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73591f64-72e8-4b51-ac23-5c71fab5e22d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.370612] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 1415.370612] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]522d70d3-df2e-da19-6d87-68e9bf675703" [ 1415.370612] env[69367]: _type = "Task" [ 1415.370612] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.378648] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]522d70d3-df2e-da19-6d87-68e9bf675703, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.505037] env[69367]: DEBUG nova.compute.manager [req-bbe35cb1-9b00-4eba-abfc-d6d318fc370c req-323dc207-d9c1-4bb7-9ce4-f1bd7bfd02e1 service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Received event network-changed-81716a44-ac7c-4a0a-92b5-533f7e8af4fd {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 1415.505208] env[69367]: DEBUG nova.compute.manager [req-bbe35cb1-9b00-4eba-abfc-d6d318fc370c req-323dc207-d9c1-4bb7-9ce4-f1bd7bfd02e1 service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Refreshing instance network info cache due to event network-changed-81716a44-ac7c-4a0a-92b5-533f7e8af4fd. {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11772}} [ 1415.505370] env[69367]: DEBUG oslo_concurrency.lockutils [req-bbe35cb1-9b00-4eba-abfc-d6d318fc370c req-323dc207-d9c1-4bb7-9ce4-f1bd7bfd02e1 service nova] Acquiring lock "refresh_cache-96c3ea0d-8912-4877-b834-3ea3ee904b80" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1415.505522] env[69367]: DEBUG oslo_concurrency.lockutils [req-bbe35cb1-9b00-4eba-abfc-d6d318fc370c req-323dc207-d9c1-4bb7-9ce4-f1bd7bfd02e1 service nova] Acquired lock "refresh_cache-96c3ea0d-8912-4877-b834-3ea3ee904b80" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1415.505687] env[69367]: DEBUG nova.network.neutron [req-bbe35cb1-9b00-4eba-abfc-d6d318fc370c req-323dc207-d9c1-4bb7-9ce4-f1bd7bfd02e1 service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Refreshing network info cache for port 81716a44-ac7c-4a0a-92b5-533f7e8af4fd {{(pid=69367) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1415.880692] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Releasing lock "[datastore2] devstack-image-cache_base/33ffe0b0-3ccc-4728-af07-217d2bfd5753" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1415.881148] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Processing image 33ffe0b0-3ccc-4728-af07-217d2bfd5753 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1415.881197] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/33ffe0b0-3ccc-4728-af07-217d2bfd5753/33ffe0b0-3ccc-4728-af07-217d2bfd5753.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1415.881355] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquired lock "[datastore2] devstack-image-cache_base/33ffe0b0-3ccc-4728-af07-217d2bfd5753/33ffe0b0-3ccc-4728-af07-217d2bfd5753.vmdk" {{(pid=69367) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1415.881547] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1415.881803] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c60f1aac-0b08-4282-81d7-47c3a9ebac40 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.899559] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1415.899725] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69367) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1415.900506] env[69367]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85af6bfb-f630-4e1c-8286-1a84204ac1ef {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.905695] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 1415.905695] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]527a2c36-aec3-9dd2-fa8e-d1de22debdb1" [ 1415.905695] env[69367]: _type = "Task" [ 1415.905695] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.913631] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': session[52de6002-1632-869c-ebd6-f7227cc33a45]527a2c36-aec3-9dd2-fa8e-d1de22debdb1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.208906] env[69367]: DEBUG nova.network.neutron [req-bbe35cb1-9b00-4eba-abfc-d6d318fc370c req-323dc207-d9c1-4bb7-9ce4-f1bd7bfd02e1 service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Updated VIF entry in instance network info cache for port 81716a44-ac7c-4a0a-92b5-533f7e8af4fd. 
{{(pid=69367) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1416.209352] env[69367]: DEBUG nova.network.neutron [req-bbe35cb1-9b00-4eba-abfc-d6d318fc370c req-323dc207-d9c1-4bb7-9ce4-f1bd7bfd02e1 service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Updating instance_info_cache with network_info: [{"id": "81716a44-ac7c-4a0a-92b5-533f7e8af4fd", "address": "fa:16:3e:a1:60:dc", "network": {"id": "0b0a82c2-1a70-4174-a75e-5863d3505b2c", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1091682254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26a89ab4163e4b9a801dcbf11c953cf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81716a44-ac", "ovs_interfaceid": "81716a44-ac7c-4a0a-92b5-533f7e8af4fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1416.415713] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Preparing fetch location {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1416.415938] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Fetch image to [datastore2] OSTACK_IMG_b5ed9107-9af0-4efe-9bea-9669de8d0309/OSTACK_IMG_b5ed9107-9af0-4efe-9bea-9669de8d0309.vmdk {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1416.416111] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Downloading stream optimized image 33ffe0b0-3ccc-4728-af07-217d2bfd5753 to [datastore2] OSTACK_IMG_b5ed9107-9af0-4efe-9bea-9669de8d0309/OSTACK_IMG_b5ed9107-9af0-4efe-9bea-9669de8d0309.vmdk on the data store datastore2 as vApp {{(pid=69367) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1416.416299] env[69367]: DEBUG nova.virt.vmwareapi.images [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Downloading image file data 33ffe0b0-3ccc-4728-af07-217d2bfd5753 to the ESX as VM named 'OSTACK_IMG_b5ed9107-9af0-4efe-9bea-9669de8d0309' {{(pid=69367) 
fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1416.485392] env[69367]: DEBUG oslo_vmware.rw_handles [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1416.485392] env[69367]: value = "resgroup-9" [ 1416.485392] env[69367]: _type = "ResourcePool" [ 1416.485392] env[69367]: }. {{(pid=69367) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1416.485685] env[69367]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-165e80b6-625a-4df2-9d0d-1a60824954de {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.507682] env[69367]: DEBUG oslo_vmware.rw_handles [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lease: (returnval){ [ 1416.507682] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52de0172-2ee8-8f2f-e3af-c5fcf5d23f98" [ 1416.507682] env[69367]: _type = "HttpNfcLease" [ 1416.507682] env[69367]: } obtained for vApp import into resource pool (val){ [ 1416.507682] env[69367]: value = "resgroup-9" [ 1416.507682] env[69367]: _type = "ResourcePool" [ 1416.507682] env[69367]: }. {{(pid=69367) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1416.508107] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the lease: (returnval){ [ 1416.508107] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52de0172-2ee8-8f2f-e3af-c5fcf5d23f98" [ 1416.508107] env[69367]: _type = "HttpNfcLease" [ 1416.508107] env[69367]: } to be ready. {{(pid=69367) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1416.514156] env[69367]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1416.514156] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52de0172-2ee8-8f2f-e3af-c5fcf5d23f98" [ 1416.514156] env[69367]: _type = "HttpNfcLease" [ 1416.514156] env[69367]: } is initializing. {{(pid=69367) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1416.712757] env[69367]: DEBUG oslo_concurrency.lockutils [req-bbe35cb1-9b00-4eba-abfc-d6d318fc370c req-323dc207-d9c1-4bb7-9ce4-f1bd7bfd02e1 service nova] Releasing lock "refresh_cache-96c3ea0d-8912-4877-b834-3ea3ee904b80" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1417.016714] env[69367]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1417.016714] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52de0172-2ee8-8f2f-e3af-c5fcf5d23f98" [ 1417.016714] env[69367]: _type = "HttpNfcLease" [ 1417.016714] env[69367]: } is initializing. {{(pid=69367) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1417.516990] env[69367]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1417.516990] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52de0172-2ee8-8f2f-e3af-c5fcf5d23f98" [ 1417.516990] env[69367]: _type = "HttpNfcLease" [ 1417.516990] env[69367]: } is initializing. 
{{(pid=69367) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1418.017400] env[69367]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1418.017400] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52de0172-2ee8-8f2f-e3af-c5fcf5d23f98" [ 1418.017400] env[69367]: _type = "HttpNfcLease" [ 1418.017400] env[69367]: } is ready. {{(pid=69367) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1418.017872] env[69367]: DEBUG oslo_vmware.rw_handles [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1418.017872] env[69367]: value = "session[52de6002-1632-869c-ebd6-f7227cc33a45]52de0172-2ee8-8f2f-e3af-c5fcf5d23f98" [ 1418.017872] env[69367]: _type = "HttpNfcLease" [ 1418.017872] env[69367]: }. {{(pid=69367) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1418.018451] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b995277f-6e42-4de5-99a9-99c11290fba9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.025191] env[69367]: DEBUG oslo_vmware.rw_handles [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52dce87a-11d3-7715-d8ea-97c19a22bbd9/disk-0.vmdk from lease info. {{(pid=69367) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1418.025369] env[69367]: DEBUG oslo_vmware.rw_handles [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Creating HTTP connection to write to file with size = 31669760 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52dce87a-11d3-7715-d8ea-97c19a22bbd9/disk-0.vmdk. {{(pid=69367) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1418.085799] env[69367]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-91e560b3-67a4-4313-9909-96d1cc2330e1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.208765] env[69367]: DEBUG oslo_vmware.rw_handles [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Completed reading data from the image iterator. {{(pid=69367) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1419.209204] env[69367]: DEBUG oslo_vmware.rw_handles [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52dce87a-11d3-7715-d8ea-97c19a22bbd9/disk-0.vmdk. 
{{(pid=69367) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1419.209883] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd67124d-9868-4760-905b-56046a1297e3 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.216939] env[69367]: DEBUG oslo_vmware.rw_handles [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52dce87a-11d3-7715-d8ea-97c19a22bbd9/disk-0.vmdk is in state: ready. {{(pid=69367) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1419.217147] env[69367]: DEBUG oslo_vmware.rw_handles [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52dce87a-11d3-7715-d8ea-97c19a22bbd9/disk-0.vmdk. {{(pid=69367) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1419.217360] env[69367]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-feaf19de-9bd9-4e28-9fbd-9992cd0a811f {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.403496] env[69367]: DEBUG oslo_vmware.rw_handles [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52dce87a-11d3-7715-d8ea-97c19a22bbd9/disk-0.vmdk. 
{{(pid=69367) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1419.403760] env[69367]: INFO nova.virt.vmwareapi.images [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Downloaded image file data 33ffe0b0-3ccc-4728-af07-217d2bfd5753 [ 1419.404666] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44b52ab0-7394-4e99-9978-cc4182bab227 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.421202] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51aed305-5c30-443f-b616-8c4681c550ed {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.452196] env[69367]: INFO nova.virt.vmwareapi.images [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] The imported VM was unregistered [ 1419.454417] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Caching image {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1419.454667] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Creating directory with path [datastore2] devstack-image-cache_base/33ffe0b0-3ccc-4728-af07-217d2bfd5753 {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1419.454948] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-84a99029-324d-4133-ad79-e4c294e11aee {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.494242] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Created directory with path [datastore2] devstack-image-cache_base/33ffe0b0-3ccc-4728-af07-217d2bfd5753 {{(pid=69367) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1419.494407] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_b5ed9107-9af0-4efe-9bea-9669de8d0309/OSTACK_IMG_b5ed9107-9af0-4efe-9bea-9669de8d0309.vmdk to [datastore2] devstack-image-cache_base/33ffe0b0-3ccc-4728-af07-217d2bfd5753/33ffe0b0-3ccc-4728-af07-217d2bfd5753.vmdk. 
{{(pid=69367) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1419.494667] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-04731ae7-26be-4fba-a4dd-f7168cf9ea95 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.501978] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 1419.501978] env[69367]: value = "task-4234195" [ 1419.501978] env[69367]: _type = "Task" [ 1419.501978] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.510476] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234195, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.013933] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234195, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.513167] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234195, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.014676] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234195, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.515964] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234195, 'name': MoveVirtualDisk_Task} progress is 88%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.017229] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234195, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.315175} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.017518] env[69367]: INFO nova.virt.vmwareapi.ds_util [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_b5ed9107-9af0-4efe-9bea-9669de8d0309/OSTACK_IMG_b5ed9107-9af0-4efe-9bea-9669de8d0309.vmdk to [datastore2] devstack-image-cache_base/33ffe0b0-3ccc-4728-af07-217d2bfd5753/33ffe0b0-3ccc-4728-af07-217d2bfd5753.vmdk. [ 1422.017691] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Cleaning up location [datastore2] OSTACK_IMG_b5ed9107-9af0-4efe-9bea-9669de8d0309 {{(pid=69367) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1422.017893] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_b5ed9107-9af0-4efe-9bea-9669de8d0309 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1422.018168] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2ec7d944-d8cb-410f-b5af-8d1289bee9b6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.024589] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 1422.024589] env[69367]: value = "task-4234196" [ 1422.024589] env[69367]: _type = "Task" [ 1422.024589] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.032331] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234196, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.535382] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234196, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.036579} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.535750] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1422.535789] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Releasing lock "[datastore2] devstack-image-cache_base/33ffe0b0-3ccc-4728-af07-217d2bfd5753/33ffe0b0-3ccc-4728-af07-217d2bfd5753.vmdk" {{(pid=69367) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1422.536069] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/33ffe0b0-3ccc-4728-af07-217d2bfd5753/33ffe0b0-3ccc-4728-af07-217d2bfd5753.vmdk to [datastore2] 96c3ea0d-8912-4877-b834-3ea3ee904b80/96c3ea0d-8912-4877-b834-3ea3ee904b80.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1422.536320] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fc4b0820-9f90-4750-9920-8079aaf35ef4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.543591] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 1422.543591] env[69367]: value = "task-4234197" [ 1422.543591] env[69367]: _type = "Task" [ 1422.543591] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.551325] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234197, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.055015] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234197, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.556174] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234197, 'name': CopyVirtualDisk_Task} progress is 43%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.056534] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234197, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.557794] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234197, 'name': CopyVirtualDisk_Task} progress is 88%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.058173] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234197, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.302365} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.058456] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/33ffe0b0-3ccc-4728-af07-217d2bfd5753/33ffe0b0-3ccc-4728-af07-217d2bfd5753.vmdk to [datastore2] 96c3ea0d-8912-4877-b834-3ea3ee904b80/96c3ea0d-8912-4877-b834-3ea3ee904b80.vmdk {{(pid=69367) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1425.059242] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45e977c0-a23c-40a4-af43-03ad210e2d0e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.081305] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] 96c3ea0d-8912-4877-b834-3ea3ee904b80/96c3ea0d-8912-4877-b834-3ea3ee904b80.vmdk or device None with type streamOptimized {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1425.081584] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d82d3695-708c-4564-a11d-9cc9469354bc {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.100928] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 1425.100928] env[69367]: value = "task-4234198" [ 1425.100928] env[69367]: _type = "Task" [ 1425.100928] env[69367]: } to complete. 
{{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.108914] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234198, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.611707] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234198, 'name': ReconfigVM_Task, 'duration_secs': 0.302246} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.612095] env[69367]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Reconfigured VM instance instance-0000006e to attach disk [datastore2] 96c3ea0d-8912-4877-b834-3ea3ee904b80/96c3ea0d-8912-4877-b834-3ea3ee904b80.vmdk or device None with type streamOptimized {{(pid=69367) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1425.612754] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-89730deb-e539-496b-87e9-76fdec85065a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.620046] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 1425.620046] env[69367]: value = "task-4234199" [ 1425.620046] env[69367]: _type = "Task" [ 1425.620046] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.630224] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234199, 'name': Rename_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.130951] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234199, 'name': Rename_Task, 'duration_secs': 0.139767} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.130951] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Powering on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1426.130951] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e1bd9459-f294-48ac-83cc-e597828cce0e {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.137537] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 1426.137537] env[69367]: value = "task-4234200" [ 1426.137537] env[69367]: _type = "Task" [ 1426.137537] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.145354] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234200, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.648060] env[69367]: DEBUG oslo_vmware.api [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234200, 'name': PowerOnVM_Task, 'duration_secs': 0.489134} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.648460] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Powered on the VM {{(pid=69367) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1426.741887] env[69367]: DEBUG nova.compute.manager [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Checking state {{(pid=69367) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1426.742826] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6717094e-526a-4b87-b687-8859a7bfb63b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.260376] env[69367]: DEBUG oslo_concurrency.lockutils [None req-2c602a22-9aff-48eb-bce8-eb986ab8ff45 tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "96c3ea0d-8912-4877-b834-3ea3ee904b80" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 19.577s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1451.088054] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1451.088054] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Cleaning up deleted instances {{(pid=69367) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11864}} [ 1451.591558] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] There are 0 instances to clean {{(pid=69367) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11873}} [ 1453.591332] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1455.087313] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1457.088325] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1457.088711] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1458.584564] env[69367]: DEBUG 
oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1458.584934] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1460.086854] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1460.087274] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1460.591035] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1460.591337] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1460.591526] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1460.591699] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69367) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1460.592661] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9280ad86-6937-4311-af98-d3105741f38d {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.601514] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db3ef73d-5661-4846-91b6-b0d635f10a2b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.616917] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaca29ea-9e1b-403c-8bae-4d7f112fa339 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.623990] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dff3127-1380-4da9-bd5a-3d2dce373e3a {{(pid=69367) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.662019] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180240MB free_disk=0GB free_vcpus=48 pci_devices=None {{(pid=69367) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1460.662183] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1460.662409] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1461.711804] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance a358ce6d-9826-4ddb-8c2f-51bac8db59d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1461.712069] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 4e346ed1-36e9-421d-975f-e8bb6f05c0a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1461.712136] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance e1c7d100-4ad7-4871-970f-bb7562bfc6fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1461.712230] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 3a3e6eec-fa9e-40fc-af80-cb88b8bf35b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1461.712381] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance ab365570-ac29-4094-be4c-d49563a465c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1461.712544] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance c17525ee-d038-4c81-932b-ed74a6de6cb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1461.712678] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 92c27615-d377-492f-a9db-ff45b2e71537 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1461.712792] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 10ffa1d6-87c3-4e83-9cab-7cdf65c1fe57 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1461.712906] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 011ab7de-98a7-41fc-9e05-e71965c73c09 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1461.713028] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance f8c07fa1-d27c-4d0f-847b-481477cd04bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1461.713147] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 7f937d89-684b-44f5-9f30-783aeafe99d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1461.713262] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance ab9d8e3e-65c5-4ac9-920f-3042b8cf2054 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1461.713376] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance c272b0ae-6313-46ab-977c-6de255e77675 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1461.713500] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance d900df05-b65c-4a45-94d1-563afbf9c022 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1461.713599] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 05aae150-5d86-4210-ae7e-8c63e83cb907 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1461.713710] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 42db60d9-e5f7-4925-8f6f-d3884687414a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1461.713821] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Instance 96c3ea0d-8912-4877-b834-3ea3ee904b80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69367) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1461.714046] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1461.714188] env[69367]: DEBUG nova.compute.resource_tracker [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3776MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=69367) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1461.891309] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2fcf940-290f-4405-9b54-6d2393d145a6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.899968] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d2c605f-208d-41db-93ac-860ce05a8a94 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.934284] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9038b661-6391-4457-a87a-e339df53b2e1 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.942345] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c168229-b645-4595-84d3-400c0cf1e045 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.955539] env[69367]: DEBUG nova.compute.provider_tree [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1462.476729] env[69367]: ERROR nova.scheduler.client.report [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] [req-2c52ab57-9373-4b43-8b57-8201935c5712] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 19ddf8be-7305-4f70-8366-52a9957232e6. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2c52ab57-9373-4b43-8b57-8201935c5712"}]} [ 1462.477086] env[69367]: DEBUG oslo_concurrency.lockutils [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.815s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1462.477666] env[69367]: ERROR nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Error updating resources for node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28.: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 1462.477666] env[69367]: ERROR nova.compute.manager Traceback (most recent call last): [ 1462.477666] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 1462.477666] env[69367]: ERROR nova.compute.manager yield [ 1462.477666] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 1462.477666] env[69367]: ERROR nova.compute.manager self.set_inventory_for_provider( [ 1462.477666] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 1462.477666] env[69367]: ERROR nova.compute.manager raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 1462.477666] env[69367]: ERROR nova.compute.manager nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/19ddf8be-7305-4f70-8366-52a9957232e6/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2c52ab57-9373-4b43-8b57-8201935c5712"}]} [ 1462.477666] env[69367]: ERROR nova.compute.manager [ 1462.477666] env[69367]: ERROR nova.compute.manager During handling of the above exception, another exception occurred: [ 1462.477666] env[69367]: ERROR nova.compute.manager [ 1462.477666] env[69367]: ERROR nova.compute.manager Traceback (most recent call last): [ 1462.477666] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 11219, in _update_available_resource_for_node [ 1462.477666] env[69367]: ERROR nova.compute.manager self.rt.update_available_resource(context, nodename, [ 1462.477666] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 965, in update_available_resource [ 1462.477666] env[69367]: ERROR nova.compute.manager self._update_available_resource(context, resources, startup=startup) [ 1462.477666] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 1462.477666] env[69367]: ERROR nova.compute.manager return f(*args, **kwargs) [ 1462.477666] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1096, in _update_available_resource [ 1462.477666] env[69367]: ERROR nova.compute.manager self._update(context, cn, startup=startup) [ 1462.477666] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 1462.477666] env[69367]: ERROR nova.compute.manager self._update_to_placement(context, compute_node, startup) [ 1462.477666] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 1462.477666] env[69367]: ERROR nova.compute.manager return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 1462.477666] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 
1462.477666] env[69367]: ERROR nova.compute.manager return attempt.get(self._wrap_exception) [ 1462.477666] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 1462.477666] env[69367]: ERROR nova.compute.manager six.reraise(self.value[0], self.value[1], self.value[2]) [ 1462.477666] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 1462.477666] env[69367]: ERROR nova.compute.manager raise value [ 1462.477666] env[69367]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 1462.477666] env[69367]: ERROR nova.compute.manager attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 1462.477666] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 1462.477666] env[69367]: ERROR nova.compute.manager self.reportclient.update_from_provider_tree( [ 1462.477666] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 1462.477666] env[69367]: ERROR nova.compute.manager with catch_all(pd.uuid): [ 1462.477666] env[69367]: ERROR nova.compute.manager File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 1462.477666] env[69367]: ERROR nova.compute.manager self.gen.throw(typ, value, traceback) [ 1462.477666] env[69367]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 1462.477666] env[69367]: ERROR nova.compute.manager raise exception.ResourceProviderSyncFailed() [ 1462.477666] env[69367]: ERROR nova.compute.manager nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 1462.477666] env[69367]: ERROR nova.compute.manager [ 1464.706331] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquiring lock "96c3ea0d-8912-4877-b834-3ea3ee904b80" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1464.706711] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "96c3ea0d-8912-4877-b834-3ea3ee904b80" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1464.706819] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquiring lock "96c3ea0d-8912-4877-b834-3ea3ee904b80-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1464.707070] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "96c3ea0d-8912-4877-b834-3ea3ee904b80-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1464.707303] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "96c3ea0d-8912-4877-b834-3ea3ee904b80-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1464.710377] env[69367]: INFO nova.compute.manager [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Terminating instance [ 1465.214540] env[69367]: DEBUG nova.compute.manager [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Start destroying the instance on the hypervisor. 
{{(pid=69367) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1465.214791] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Destroying instance {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1465.215827] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52cf2c0b-a3b8-49e7-a544-8e7e2f47d587 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.224045] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Powering off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1465.224293] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4a8b68a1-f6fd-4b8e-b99c-ad1e4ec5c8a6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.230939] env[69367]: DEBUG oslo_vmware.api [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 1465.230939] env[69367]: value = "task-4234201" [ 1465.230939] env[69367]: _type = "Task" [ 1465.230939] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.239497] env[69367]: DEBUG oslo_vmware.api [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234201, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.746734] env[69367]: DEBUG oslo_vmware.api [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234201, 'name': PowerOffVM_Task, 'duration_secs': 0.180353} completed successfully. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.747197] env[69367]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Powered off the VM {{(pid=69367) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1465.747523] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Unregistering the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1465.747872] env[69367]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-04becad4-6684-42d1-9c3c-32afc56bc27a {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.891392] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Unregistered the VM {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1465.891617] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Deleting contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1465.891800] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Deleting the datastore file [datastore2] 96c3ea0d-8912-4877-b834-3ea3ee904b80 {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1465.892105] env[69367]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8584b849-f271-468d-a0a9-c4b1ce86d9a6 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.899326] env[69367]: DEBUG oslo_vmware.api [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for the task: (returnval){ [ 1465.899326] env[69367]: value = "task-4234203" [ 1465.899326] env[69367]: _type = "Task" [ 1465.899326] env[69367]: } to complete. {{(pid=69367) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.907141] env[69367]: DEBUG oslo_vmware.api [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234203, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.087126] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1466.087339] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69367) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11183}} [ 1466.087531] env[69367]: DEBUG oslo_service.periodic_task [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69367) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1466.087667] env[69367]: DEBUG nova.compute.manager [None req-91493d17-5409-4115-9ff4-ec062188f565 None None] Cleaning up deleted instances with incomplete migration {{(pid=69367) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11902}} [ 1466.409432] env[69367]: DEBUG oslo_vmware.api [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Task: {'id': task-4234203, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131336} completed successfully. {{(pid=69367) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.409660] env[69367]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Deleted the datastore file {{(pid=69367) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1466.409829] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Deleted contents of the VM from datastore datastore2 {{(pid=69367) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1466.410020] env[69367]: DEBUG nova.virt.vmwareapi.vmops [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Instance destroyed {{(pid=69367) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1466.410209] env[69367]: INFO nova.compute.manager [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1466.410480] env[69367]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69367) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1466.410686] env[69367]: DEBUG nova.compute.manager [-] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Deallocating network for instance {{(pid=69367) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1466.410787] env[69367]: DEBUG nova.network.neutron [-] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] deallocate_for_instance() {{(pid=69367) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1466.871177] env[69367]: DEBUG nova.compute.manager [req-bb4fcb67-f535-45a4-9d7d-ffad8f14b90e req-fba610d8-1656-4302-bf57-8492c9c2d89e service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Received event network-vif-deleted-81716a44-ac7c-4a0a-92b5-533f7e8af4fd {{(pid=69367) external_instance_event /opt/stack/nova/nova/compute/manager.py:11767}} [ 1466.871448] env[69367]: INFO nova.compute.manager [req-bb4fcb67-f535-45a4-9d7d-ffad8f14b90e req-fba610d8-1656-4302-bf57-8492c9c2d89e service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Neutron deleted interface 81716a44-ac7c-4a0a-92b5-533f7e8af4fd; detaching it from the instance and deleting it from the info cache [ 1466.871662] env[69367]: DEBUG nova.network.neutron [req-bb4fcb67-f535-45a4-9d7d-ffad8f14b90e req-fba610d8-1656-4302-bf57-8492c9c2d89e service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1467.356520] env[69367]: DEBUG nova.network.neutron [-] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Updating instance_info_cache with network_info: [] {{(pid=69367) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1467.376140] env[69367]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-28f521a0-7b21-4821-97b3-9d10bdc03aed {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.386372] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-754f1d97-c777-46d1-b25f-4f3147d503a4 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.412948] env[69367]: DEBUG nova.compute.manager [req-bb4fcb67-f535-45a4-9d7d-ffad8f14b90e req-fba610d8-1656-4302-bf57-8492c9c2d89e service nova] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Detach interface failed, port_id=81716a44-ac7c-4a0a-92b5-533f7e8af4fd, reason: Instance 96c3ea0d-8912-4877-b834-3ea3ee904b80 could not be found. {{(pid=69367) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11601}} [ 1467.859736] env[69367]: INFO nova.compute.manager [-] [instance: 96c3ea0d-8912-4877-b834-3ea3ee904b80] Took 1.45 seconds to deallocate network for instance. 
[ 1468.366575] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1468.367054] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1468.367198] env[69367]: DEBUG nova.objects.instance [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lazy-loading 'resources' on Instance uuid 96c3ea0d-8912-4877-b834-3ea3ee904b80 {{(pid=69367) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1468.889039] env[69367]: DEBUG nova.scheduler.client.report [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Refreshing inventories for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1468.905103] env[69367]: DEBUG nova.scheduler.client.report [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Updating ProviderTree inventory for provider 19ddf8be-7305-4f70-8366-52a9957232e6 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1468.905383] env[69367]: DEBUG nova.compute.provider_tree [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Updating inventory in ProviderTree for provider 19ddf8be-7305-4f70-8366-52a9957232e6 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1468.917283] env[69367]: DEBUG nova.scheduler.client.report [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Refreshing aggregate associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, aggregates: None {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 
1468.936029] env[69367]: DEBUG nova.scheduler.client.report [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Refreshing trait associations for resource provider 19ddf8be-7305-4f70-8366-52a9957232e6, traits: HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69367) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1469.159884] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeda4276-19ef-4fc6-8f8d-c2a180d898cf {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.168225] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6248bc67-fc82-4e93-914f-fcd85dd1dd8b {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.199859] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bdb14a5-ed9f-4978-bbfa-da9209411bd9 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.208718] env[69367]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-482e1b5f-ab53-475a-a3a2-7840d0e6a9b0 {{(pid=69367) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.223331] env[69367]: DEBUG nova.compute.provider_tree [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Inventory has not changed in ProviderTree for provider: 19ddf8be-7305-4f70-8366-52a9957232e6 {{(pid=69367) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1469.726997] env[69367]: DEBUG nova.scheduler.client.report [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Inventory has not changed for provider 19ddf8be-7305-4f70-8366-52a9957232e6 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69367) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1470.233068] env[69367]: DEBUG oslo_concurrency.lockutils [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.865s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1470.253350] env[69367]: INFO nova.scheduler.client.report [None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Deleted allocations for instance 96c3ea0d-8912-4877-b834-3ea3ee904b80 [ 1470.761763] env[69367]: DEBUG oslo_concurrency.lockutils 
[None req-e6ff10cf-700b-4db1-8c10-6f765cdd0e6c tempest-AttachVolumeShelveTestJSON-525996393 tempest-AttachVolumeShelveTestJSON-525996393-project-member] Lock "96c3ea0d-8912-4877-b834-3ea3ee904b80" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.055s {{(pid=69367) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}